From 2d34183b273af1125181f04c49725efc2fa351af Mon Sep 17 00:00:00 2001
From: Matthew Wise
Date: Mon, 30 May 2016 09:12:02 -0500
Subject: [DOCS] fix example code issues in documentation

## What changes were proposed in this pull request?

Fixed broken java code examples in streaming documentation

Attn: tdas

Author: Matthew Wise

Closes #13388 from mawise/fix_docs_java_streaming_example.
---
 docs/streaming-kafka-integration.md | 2 +-
 docs/streaming-programming-guide.md | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/streaming-kafka-integration.md b/docs/streaming-kafka-integration.md
index 0f1e32212e..e0d3f4f69b 100644
--- a/docs/streaming-kafka-integration.md
+++ b/docs/streaming-kafka-integration.md
@@ -111,7 +111,7 @@ Next, we discuss how to use this approach in your streaming application.
 	<div data-lang="java" markdown="1">
 		import org.apache.spark.streaming.kafka.*;
 
-		JavaPairReceiverInputDStream<String, String> directKafkaStream =
+		JavaPairInputDStream<String, String> directKafkaStream =
 			KafkaUtils.createDirectStream(streamingContext,
 				[key class], [value class], [key decoder class], [value decoder class],
 				[map of Kafka parameters], [set of topics to consume]);
diff --git a/docs/streaming-programming-guide.md b/docs/streaming-programming-guide.md
index d7eafff38f..6550fcc052 100644
--- a/docs/streaming-programming-guide.md
+++ b/docs/streaming-programming-guide.md
@@ -145,8 +145,8 @@ import org.apache.spark.streaming.api.java.*;
 import scala.Tuple2;
 
 // Create a local StreamingContext with two working thread and batch interval of 1 second
-SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
-JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1))
+SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount");
+JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));
 {% endhighlight %}
 
 Using this context, we can create a DStream that represents streaming data from a TCP
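To make the effect of the two fixes concrete, below is a minimal, self-contained sketch of a Java program that uses both corrected snippets: the `JavaStreamingContext` creation with the statement-terminating semicolons this patch adds, and a direct Kafka stream typed as `JavaPairInputDStream`, which is what `KafkaUtils.createDirectStream` returns in the Kafka 0.8 direct integration these docs describe. The broker address, topic name, and the line-printing logic are illustrative placeholders, not part of the patch.

{% highlight java %}
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import kafka.serializer.StringDecoder;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

public class DirectKafkaExample {
  public static void main(String[] args) throws InterruptedException {
    // Corrected lines from streaming-programming-guide.md: a local context with
    // two worker threads and a 1-second batch interval, each statement ending in ';'.
    SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount");
    JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));

    // Placeholder Kafka connection details -- adjust the broker list and topic as needed.
    Map<String, String> kafkaParams = new HashMap<>();
    kafkaParams.put("metadata.broker.list", "localhost:9092");
    Set<String> topics = new HashSet<>(Arrays.asList("test-topic"));

    // Corrected type from streaming-kafka-integration.md: the direct (receiver-less)
    // approach returns a JavaPairInputDStream, not a JavaPairReceiverInputDStream.
    JavaPairInputDStream<String, String> directKafkaStream =
        KafkaUtils.createDirectStream(jssc,
            String.class, String.class, StringDecoder.class, StringDecoder.class,
            kafkaParams, topics);

    // Print the message values of each batch, just to exercise the stream.
    JavaDStream<String> lines = directKafkaStream.map(record -> record._2());
    lines.print();

    jssc.start();
    jssc.awaitTermination();
  }
}
{% endhighlight %}

The type change is the substance of the first fix: the direct approach creates no receiver, so the receiver-specific `JavaPairReceiverInputDStream` subtype does not apply, and a variable declared with that type will not compile against what `createDirectStream` actually returns.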