Posted to commits@spark.apache.org by jo...@apache.org on 2015/12/01 01:37:47 UTC

spark git commit: [SPARK-12000] Fix API doc generation issues

Repository: spark
Updated Branches:
  refs/heads/master edb26e7f4 -> d3ca8cfac


[SPARK-12000] Fix API doc generation issues

This pull request fixes multiple issues with API doc generation.

- Modify the Jekyll plugin so that the entire doc build fails if API docs cannot be generated. This makes breakage easy to detect, since it now triggers Jenkins failures.
- Change how the `-target` compiler flag is handled in order to fix `javadoc` generation.
- Incorporate doc changes from thunterdb (in #10048).

Closes #10048.

Author: Josh Rosen <jo...@databricks.com>
Author: Timothy Hunter <ti...@databricks.com>

Closes #10049 from JoshRosen/fix-doc-build.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d3ca8cfa
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d3ca8cfa
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d3ca8cfa

Branch: refs/heads/master
Commit: d3ca8cfac286ae19f8bedc736877ea9d0a0a072c
Parents: edb26e7
Author: Josh Rosen <jo...@databricks.com>
Authored: Mon Nov 30 16:37:27 2015 -0800
Committer: Josh Rosen <jo...@databricks.com>
Committed: Mon Nov 30 16:37:27 2015 -0800

----------------------------------------------------------------------
 docs/_plugins/copy_api_dirs.rb                           |  6 +++---
 .../org/apache/spark/network/client/StreamCallback.java  |  4 ++--
 .../java/org/apache/spark/network/server/RpcHandler.java |  2 +-
 project/SparkBuild.scala                                 | 11 ++++++++---
 4 files changed, 14 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/docs/_plugins/copy_api_dirs.rb
----------------------------------------------------------------------
diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index 01718d9..f2f3e2e 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -27,7 +27,7 @@ if not (ENV['SKIP_API'] == '1')
     cd("..")
 
     puts "Running 'build/sbt -Pkinesis-asl clean compile unidoc' from " + pwd + "; this may take a few minutes..."
-    puts `build/sbt -Pkinesis-asl clean compile unidoc`
+    system("build/sbt -Pkinesis-asl clean compile unidoc") || raise("Unidoc generation failed")
 
     puts "Moving back into docs dir."
     cd("docs")
@@ -117,7 +117,7 @@ if not (ENV['SKIP_API'] == '1')
 
   puts "Moving to python/docs directory and building sphinx."
   cd("../python/docs")
-  puts `make html`
+  system("make html") || raise("Python doc generation failed")
 
   puts "Moving back into home dir."
   cd("../../")
@@ -131,7 +131,7 @@ if not (ENV['SKIP_API'] == '1')
   # Build SparkR API docs
   puts "Moving to R directory and building roxygen docs."
   cd("R")
-  puts `./create-docs.sh`
+  system("./create-docs.sh") || raise("R doc generation failed")
 
   puts "Moving back into home dir."
   cd("../")

http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
----------------------------------------------------------------------
diff --git a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
index 093fada..51d34ca 100644
--- a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
+++ b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 /**
- * Callback for streaming data. Stream data will be offered to the {@link onData(ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link onComplete()} will be
+ * Callback for streaming data. Stream data will be offered to the {@link onData(String, ByteBuffer)}
+ * method as it arrives. Once all the stream data is received, {@link onComplete(String)} will be
  * called.
  * <p>
  * The network library guarantees that a single thread will call these methods at a time, but
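
To make the corrected signatures concrete, here is a hypothetical Scala implementation of the callback. The in-memory buffering is purely illustrative, and onFailure(String, Throwable) is the interface's third method, declared in the same file but outside this hunk:

    import java.io.ByteArrayOutputStream
    import java.nio.ByteBuffer

    import org.apache.spark.network.client.StreamCallback

    // Illustrative only: collects one stream's chunks into memory.
    // The String argument added by this doc fix is the stream id.
    class CollectingStreamCallback extends StreamCallback {
      private val buffered = new ByteArrayOutputStream()

      // Invoked repeatedly, by one thread at a time, as chunks arrive.
      override def onData(streamId: String, buf: ByteBuffer): Unit = {
        val bytes = new Array[Byte](buf.remaining())
        buf.get(bytes)
        buffered.write(bytes)
      }

      // Invoked once, after the final chunk has been delivered.
      override def onComplete(streamId: String): Unit =
        println(s"stream $streamId complete: ${buffered.size()} bytes")

      // Invoked if the transfer fails partway through.
      override def onFailure(streamId: String, cause: Throwable): Unit =
        System.err.println(s"stream $streamId failed: ${cause.getMessage}")
    }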

http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
----------------------------------------------------------------------
diff --git a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
index 65109dd..1a11f7b 100644
--- a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
+++ b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
@@ -55,7 +55,7 @@ public abstract class RpcHandler {
 
   /**
    * Receives an RPC message that does not expect a reply. The default implementation will
-   * call "{@link receive(TransportClient, byte[], RpcResponseCallback}" and log a warning if
+   * call "{@link receive(TransportClient, byte[], RpcResponseCallback)}" and log a warning if
    * any of the callback methods are called.
    *
    * @param client A channel client which enables the handler to make requests back to the sender
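
The corrected `{@link}` describes the default behavior of the one-way receive overload. A self-contained Scala paraphrase of that shape is below; the trait names are stand-ins for the real TransportClient and RpcResponseCallback, not copies of them:

    // Stand-ins for the real TransportClient and RpcResponseCallback.
    trait Client
    trait ResponseCallback {
      def onSuccess(response: Array[Byte]): Unit
      def onFailure(e: Throwable): Unit
    }

    abstract class SketchRpcHandler {
      // Two-way RPC: subclasses answer through the callback.
      def receive(client: Client, message: Array[Byte],
          callback: ResponseCallback): Unit

      // One-way RPC: the default delegates to the overload above with a
      // callback that only logs, since the sender expects no reply.
      def receive(client: Client, message: Array[Byte]): Unit =
        receive(client, message, new ResponseCallback {
          override def onSuccess(response: Array[Byte]): Unit =
            System.err.println("warning: response produced for one-way RPC")
          override def onFailure(e: Throwable): Unit =
            System.err.println(s"warning: failure reported for one-way RPC: $e")
        })
    }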

http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index f575f00..63290d8 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -160,7 +160,12 @@ object SparkBuild extends PomBuild {
 
     javacOptions in Compile ++= Seq(
       "-encoding", "UTF-8",
-      "-source", javacJVMVersion.value,
+      "-source", javacJVMVersion.value
+    ),
+    // This -target option cannot be set in the Compile configuration scope since `javadoc` doesn't
+    // play nicely with it; see https://github.com/sbt/sbt/issues/355#issuecomment-3817629 for
+    // additional discussion and explanation.
+    javacOptions in (Compile, compile) ++= Seq(
       "-target", javacJVMVersion.value
     ),
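
The scoping distinction is the crux of the javadoc fix: a setting in the bare Compile configuration flows to every task in that configuration, including doc (which drives javadoc), whereas scoping to (Compile, compile) confines it to compilation alone, so javadoc never sees the -target flag it doesn't play nicely with. A minimal build.sbt sketch of the pattern, with a hard-coded version standing in for Spark's javacJVMVersion:

    // Applies to every task in Compile, including doc/javadoc.
    javacOptions in Compile ++= Seq("-encoding", "UTF-8", "-source", "1.7")

    // Applies only to the compile task, so javadoc never receives -target.
    javacOptions in (Compile, compile) ++= Seq("-target", "1.7")

From the sbt shell, `show compile:compile::javacOptions` and `show compile:doc::javacOptions` (sbt 0.13 syntax) make the difference visible.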
 
@@ -547,9 +552,9 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, bagel, examples, tools, streamingFlumeSink, yarn),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, examples, tools, streamingFlumeSink, yarn, testTags),
 
     // Skip actual catalyst, but include the subproject.
     // Catalyst is not public API and contains quasiquotes which break scaladoc.
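
For readers unfamiliar with the filter DSL in the hunk above: inAnyProject selects every project in the build and `--` subtracts the named ones, so appending testTags simply drops that module from both the Scala and Java aggregated docs. A sketch with a placeholder project reference, assuming sbt-unidoc's keys as imported in SparkBuild.scala:

    // `internalProject` is a placeholder for any module whose sources
    // should stay out of the aggregated API docs.
    unidocProjectFilter in (ScalaUnidoc, unidoc) :=
      inAnyProject -- inProjects(internalProject)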

