path: root/sql/catalyst
author    Burak Yavuz <brkyvz@gmail.com>  2016-06-28 17:02:16 -0700
committer Shixiong Zhu <shixiong@databricks.com>  2016-06-28 17:02:16 -0700
commit  5545b791096756b07b3207fb3de13b68b9a37b00 (patch)
tree    d48aad07d9dbb8592a01978e9aad1a72572f0685 /sql/catalyst
parent  3554713a163c58ca176ffde87d2c6e4a91bacb50 (diff)
[MINOR][DOCS][STRUCTURED STREAMING] Minor doc fixes around `DataFrameWriter` and `DataStreamWriter`
## What changes were proposed in this pull request?

Fixes a couple of old references from `DataFrameWriter.startStream` to `DataStreamWriter.start`.

Author: Burak Yavuz <brkyvz@gmail.com>

Closes #13952 from brkyvz/minor-doc-fix.
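For context, the renamed entry point that the corrected messages refer to looks roughly like this (a minimal sketch, assuming a `SparkSession` named `spark`, a local socket source, and a console sink; none of these appear in the diff itself):

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch; the app name, host, and port are assumptions for illustration.
val spark = SparkSession.builder().appName("doc-fix-sketch").getOrCreate()

// A streaming DataFrame read from a socket source.
val lines = spark.readStream
  .format("socket")
  .option("host", "localhost")
  .option("port", 9999)
  .load()

// The form the corrected messages point to: DataStreamWriter.start()
val query = lines.writeStream
  .format("console")
  .start()

// The form the old messages referred to, which is no longer part of the API:
//   lines.write.startStream()
query.awaitTermination()
```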
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala  6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala   6
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala
index 689e016a5a..f6e32e29eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala
@@ -30,7 +30,7 @@ object UnsupportedOperationChecker {
def checkForBatch(plan: LogicalPlan): Unit = {
plan.foreachUp {
case p if p.isStreaming =>
- throwError("Queries with streaming sources must be executed with write.startStream()")(p)
+ throwError("Queries with streaming sources must be executed with writeStream.start()")(p)
case _ =>
}
@@ -40,7 +40,7 @@ object UnsupportedOperationChecker {
if (!plan.isStreaming) {
throwError(
- "Queries without streaming sources cannot be executed with write.startStream()")(plan)
+ "Queries without streaming sources cannot be executed with writeStream.start()")(plan)
}
// Disallow multiple streaming aggregations
@@ -154,7 +154,7 @@ object UnsupportedOperationChecker {
case ReturnAnswer(child) if child.isStreaming =>
throwError("Cannot return immediate result on streaming DataFrames/Dataset. Queries " +
- "with streaming DataFrames/Datasets must be executed with write.startStream().")
+ "with streaming DataFrames/Datasets must be executed with writeStream.start().")
case _ =>
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala
index c21ad5e03a..6df47acaba 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationsSuite.scala
@@ -53,12 +53,12 @@ class UnsupportedOperationsSuite extends SparkFunSuite {
assertNotSupportedInBatchPlan(
"streaming source",
streamRelation,
- Seq("with streaming source", "startStream"))
+ Seq("with streaming source", "start"))
assertNotSupportedInBatchPlan(
"select on streaming source",
streamRelation.select($"count(*)"),
- Seq("with streaming source", "startStream"))
+ Seq("with streaming source", "start"))
/*
@@ -70,7 +70,7 @@ class UnsupportedOperationsSuite extends SparkFunSuite {
// Batch plan in streaming query
testError(
"streaming plan - no streaming source",
- Seq("without streaming source", "startStream")) {
+ Seq("without streaming source", "start")) {
UnsupportedOperationChecker.checkForStreaming(batchRelation.select($"count(*)"), Append)
}
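For illustration, a hedged sketch of how the corrected messages surface to users; the streaming DataFrame and the `spark` session are assumptions carried over from the sketch above, and a batch action on a streaming plan goes through `checkForBatch`:

```scala
import org.apache.spark.sql.AnalysisException

// Assumed streaming DataFrame, as in the earlier sketch.
val streamingDf = spark.readStream
  .format("socket")
  .option("host", "localhost")
  .option("port", 9999)
  .load()

// A batch action on a streaming Dataset fails analysis in checkForBatch with the
// corrected wording:
//   "Queries with streaming sources must be executed with writeStream.start()"
try {
  streamingDf.count()
} catch {
  case e: AnalysisException => println(e.getMessage)
}
```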