aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--docs/_plugins/copy_api_dirs.rb6
-rw-r--r--network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java4
-rw-r--r--network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java2
-rw-r--r--project/SparkBuild.scala11
4 files changed, 14 insertions, 9 deletions
diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index 01718d98df..f2f3e2e653 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -27,7 +27,7 @@ if not (ENV['SKIP_API'] == '1')
cd("..")
puts "Running 'build/sbt -Pkinesis-asl clean compile unidoc' from " + pwd + "; this may take a few minutes..."
- puts `build/sbt -Pkinesis-asl clean compile unidoc`
+ system("build/sbt -Pkinesis-asl clean compile unidoc") || raise("Unidoc generation failed")
puts "Moving back into docs dir."
cd("docs")
@@ -117,7 +117,7 @@ if not (ENV['SKIP_API'] == '1')
puts "Moving to python/docs directory and building sphinx."
cd("../python/docs")
- puts `make html`
+ system("make html") || raise("Python doc generation failed")
puts "Moving back into home dir."
cd("../../")
@@ -131,7 +131,7 @@ if not (ENV['SKIP_API'] == '1')
# Build SparkR API docs
puts "Moving to R directory and building roxygen docs."
cd("R")
- puts `./create-docs.sh`
+ system("./create-docs.sh") || raise("R doc generation failed")
puts "Moving back into home dir."
cd("../")
diff --git a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
index 093fada320..51d34cac6e 100644
--- a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
+++ b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
@@ -21,8 +21,8 @@ import java.io.IOException;
import java.nio.ByteBuffer;
/**
- * Callback for streaming data. Stream data will be offered to the {@link onData(ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link onComplete()} will be
+ * Callback for streaming data. Stream data will be offered to the {@link onData(String, ByteBuffer)}
+ * method as it arrives. Once all the stream data is received, {@link onComplete(String)} will be
* called.
* <p>
* The network library guarantees that a single thread will call these methods at a time, but
diff --git a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
index 65109ddfe1..1a11f7b382 100644
--- a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
+++ b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
@@ -55,7 +55,7 @@ public abstract class RpcHandler {
/**
* Receives an RPC message that does not expect a reply. The default implementation will
- * call "{@link receive(TransportClient, byte[], RpcResponseCallback}" and log a warning if
+ * call "{@link receive(TransportClient, byte[], RpcResponseCallback)}" and log a warning if
* any of the callback methods are called.
*
* @param client A channel client which enables the handler to make requests back to the sender
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index f575f0012d..63290d8a66 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -160,7 +160,12 @@ object SparkBuild extends PomBuild {
javacOptions in Compile ++= Seq(
"-encoding", "UTF-8",
- "-source", javacJVMVersion.value,
+ "-source", javacJVMVersion.value
+ ),
+ // This -target option cannot be set in the Compile configuration scope since `javadoc` doesn't
+ // play nicely with it; see https://github.com/sbt/sbt/issues/355#issuecomment-3817629 for
+ // additional discussion and explanation.
+ javacOptions in (Compile, compile) ++= Seq(
"-target", javacJVMVersion.value
),
@@ -547,9 +552,9 @@ object Unidoc {
publish := {},
unidocProjectFilter in(ScalaUnidoc, unidoc) :=
- inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn),
+ inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
unidocProjectFilter in(JavaUnidoc, unidoc) :=
- inAnyProject -- inProjects(OldDeps.project, repl, bagel, examples, tools, streamingFlumeSink, yarn),
+ inAnyProject -- inProjects(OldDeps.project, repl, bagel, examples, tools, streamingFlumeSink, yarn, testTags),
// Skip actual catalyst, but include the subproject.
// Catalyst is not public API and contains quasiquotes which break scaladoc.