author    Tathagata Das <tathagata.das1565@gmail.com>  2014-01-13 23:23:46 -0800
committer Tathagata Das <tathagata.das1565@gmail.com>  2014-01-13 23:23:46 -0800
commit    4e497db8f3826cf5142b2165a08d02c6f3c2cd90 (patch)
tree      9ec25e86ccf8986035215e51f7b0e1ba1b96dad6 /examples/src
parent    1233b3de01be1ff57910786f5f3e2e2a23e228ab (diff)
Removed StreamingContext.registerInputStream and registerOutputStream - they were no longer needed, as InputDStream has been made to register itself. Also made DStream.register() private[streaming], since it is not useful to expose this confusing function. Updated a lot of documentation.
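
For illustration, here is a minimal sketch of what a streaming program looks like after this change, assuming the Spark Streaming API of this era (the master string, application name, host, and port below are placeholders, not taken from the commit). The input DStream returned by socketTextStream registers itself with the StreamingContext when it is created, so there is no registerInputStream call, and output operations such as print() attach themselves without going through DStream.register().

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.StreamingContext._

object SelfRegisteringStreamSketch {
  def main(args: Array[String]) {
    // Placeholder master and app name; 1-second batches for illustration.
    val ssc = new StreamingContext("local[2]", "SelfRegisteringStreamSketch", Seconds(1))

    // The returned input DStream registers itself with ssc on creation,
    // so no ssc.registerInputStream(lines) step is needed (the method is gone).
    val lines = ssc.socketTextStream("localhost", 9999, StorageLevel.MEMORY_ONLY_SER)

    // Output operations register themselves too; DStream.register() is
    // now private[streaming] and not meant to be called by user code.
    val wordCounts = lines.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
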
Diffstat (limited to 'examples/src')
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala  3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
index 25f7013307..0226475712 100644
--- a/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
@@ -19,6 +19,7 @@ package org.apache.spark.streaming.examples
 
 import org.apache.spark.streaming.{Seconds, StreamingContext}
 import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.storage.StorageLevel
 
 /**
  * Counts words in text encoded with UTF8 received from the network every second.
@@ -48,7 +49,7 @@ object NetworkWordCount {
 
     // Create a NetworkInputDStream on target ip:port and count the
     // words in input stream of \n delimited text (eg. generated by 'nc')
-    val lines = ssc.socketTextStream(args(1), args(2).toInt)
+    val lines = ssc.socketTextStream(args(1), args(2).toInt, StorageLevel.MEMORY_ONLY_SER)
     val words = lines.flatMap(_.split(" "))
     val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _)
     wordCounts.print()
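
A note on the updated call: passing StorageLevel.MEMORY_ONLY_SER keeps the received blocks serialized in memory, without the replication or disk spill of the heavier default level, which is presumably sufficient for a local example. To feed the example, the \n-delimited text mentioned in the comment can come from a plain netcat server, e.g. 'nc -lk <port>' on the target host.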