[SPARK-12000] Fix API doc generation issues

This pull request fixes multiple issues with API doc generation.

- Modify the Jekyll plugin so that the entire doc build fails if API docs cannot be generated. This makes it easy to detect when the doc build breaks, since breakage will now trigger Jenkins failures.
- Change how the `-target` javac option is handled in order to fix `javadoc` generation.
- Incorporate doc changes from thunterdb (in #10048).

Closes #10048.

Author: Josh Rosen <joshrosen@databricks.com>
Author: Timothy Hunter <timhunter@databricks.com>

Closes #10049 from JoshRosen/fix-doc-build.
Josh Rosen 2015-11-30 16:37:27 -08:00
parent edb26e7f4e
commit d3ca8cfac2
4 changed files with 14 additions and 9 deletions

docs/_plugins/copy_api_dirs.rb

@@ -27,7 +27,7 @@ if not (ENV['SKIP_API'] == '1')
     cd("..")
     puts "Running 'build/sbt -Pkinesis-asl clean compile unidoc' from " + pwd + "; this may take a few minutes..."
-    puts `build/sbt -Pkinesis-asl clean compile unidoc`
+    system("build/sbt -Pkinesis-asl clean compile unidoc") || raise("Unidoc generation failed")
     puts "Moving back into docs dir."
     cd("docs")
@@ -117,7 +117,7 @@ if not (ENV['SKIP_API'] == '1')
     puts "Moving to python/docs directory and building sphinx."
     cd("../python/docs")
-    puts `make html`
+    system("make html") || raise("Python doc generation failed")
     puts "Moving back into home dir."
     cd("../../")
@@ -131,7 +131,7 @@ if not (ENV['SKIP_API'] == '1')
     # Build SparkR API docs
     puts "Moving to R directory and building roxygen docs."
     cd("R")
-    puts `./create-docs.sh`
+    system("./create-docs.sh") || raise("R doc generation failed")
     puts "Moving back into home dir."
     cd("../")

network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java

@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 /**
- * Callback for streaming data. Stream data will be offered to the {@link onData(ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link onComplete()} will be
+ * Callback for streaming data. Stream data will be offered to the {@link onData(String, ByteBuffer)}
+ * method as it arrives. Once all the stream data is received, {@link onComplete(String)} will be
  * called.
  * <p>
  * The network library guarantees that a single thread will call these methods at a time, but

network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java

@@ -55,7 +55,7 @@ public abstract class RpcHandler {
   /**
    * Receives an RPC message that does not expect a reply. The default implementation will
-   * call "{@link receive(TransportClient, byte[], RpcResponseCallback}" and log a warning if
+   * call "{@link receive(TransportClient, byte[], RpcResponseCallback)}" and log a warning if
    * any of the callback methods are called.
    *
    * @param client A channel client which enables the handler to make requests back to the sender

project/SparkBuild.scala

@@ -160,7 +160,12 @@ object SparkBuild extends PomBuild {
     javacOptions in Compile ++= Seq(
       "-encoding", "UTF-8",
-      "-source", javacJVMVersion.value,
-      "-target", javacJVMVersion.value
+      "-source", javacJVMVersion.value
+    ),
+    // This -target option cannot be set in the Compile configuration scope since `javadoc` doesn't
+    // play nicely with it; see https://github.com/sbt/sbt/issues/355#issuecomment-3817629 for
+    // additional discussion and explanation.
+    javacOptions in (Compile, compile) ++= Seq(
+      "-target", javacJVMVersion.value
     ),
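The scoping detail behind this change: in sbt, an option added to `javacOptions in Compile` is inherited by every task in the Compile configuration, including `doc`, which passes it on to `javadoc`; `javadoc` does not accept javac-only flags such as `-target`. Scoping the setting to `(Compile, compile)` restricts it to the `compile` task alone. A hedged build.sbt sketch of the distinction (the "1.7" version strings are placeholders, not from this patch):

    // sbt 0.13-era syntax, as used by SparkBuild.scala
    javacOptions in Compile ++= Seq("-source", "1.7")             // seen by javac and javadoc
    javacOptions in (Compile, compile) ++= Seq("-target", "1.7")  // seen by javac only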
@@ -547,9 +552,9 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
 
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, bagel, examples, tools, streamingFlumeSink, yarn),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, examples, tools, streamingFlumeSink, yarn, testTags),
 
     // Skip actual catalyst, but include the subproject.
     // Catalyst is not public API and contains quasiquotes which break scaladoc.
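Worth noting about the two filter changes above: the unidoc project filter is subtractive, starting from every aggregated project and removing the non-public ones, and a module must be excluded from both the ScalaUnidoc and the JavaUnidoc scope, or its classes still leak into one of the two doc sets. A minimal fragment of the idea, assuming the sbt-unidoc keys and a `testTags` project reference are in scope as they are in SparkBuild.scala:

    // Exclude the internal, non-API test-tags module from both doc builds.
    unidocProjectFilter in (ScalaUnidoc, unidoc) := inAnyProject -- inProjects(testTags),
    unidocProjectFilter in (JavaUnidoc, unidoc) := inAnyProject -- inProjects(testTags),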