From 28f8b721f65fc8e699f208c5dc64d90822a85d91 Mon Sep 17 00:00:00 2001
From: Tathagata Das
Date: Sun, 24 Feb 2013 13:01:54 -0800
Subject: Added back the initial spark job before starting streaming receivers

---
 streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'streaming')

diff --git a/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala b/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala
index 64972fd5cd..b159d26c02 100644
--- a/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala
+++ b/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala
@@ -141,7 +141,7 @@ class NetworkInputTracker(
     }
     // Run the dummy Spark job to ensure that all slaves have registered.
     // This avoids all the receivers to be scheduled on the same node.
-    //ssc.sparkContext.makeRDD(1 to 100, 100).map(x => (x, 1)).reduceByKey(_ + _, 20).collect()
+    ssc.sparkContext.makeRDD(1 to 50, 50).map(x => (x, 1)).reduceByKey(_ + _, 20).collect()
     // Distribute the receivers and start them
     ssc.sparkContext.runJob(tempRDD, startReceiver)
--
cgit v1.2.3
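
The re-enabled line is a small throwaway shuffle whose only purpose is to block until every slave has registered with the master; without it, the runJob(tempRDD, startReceiver) call that follows can place all the receivers on whichever node happens to be registered first. A minimal standalone sketch of the same warm-up pattern, assuming the pre-0.8 "spark" package used by this file; the object name and master URL are illustrative and not part of the patch:

  // Warm-up sketch (assumption: a standalone cluster reachable at the
  // illustrative URL below; the result of the dummy job is discarded).
  import spark.SparkContext
  import spark.SparkContext._

  object ReceiverWarmup {
    def main(args: Array[String]): Unit = {
      val sc = new SparkContext("spark://master:7077", "ReceiverWarmup")

      // Dummy shuffle job: 50 tasks mapped to (x, 1) pairs and reduced into
      // 20 partitions, forcing executors to register before real work starts.
      sc.makeRDD(1 to 50, 50).map(x => (x, 1)).reduceByKey(_ + _, 20).collect()

      // With executors registered, a subsequent runJob over one partition per
      // receiver (as NetworkInputTracker does) can spread receivers across nodes.
      sc.stop()
    }
  }

Halving the dummy job from 100 tasks to 50 keeps the warm-up cheap while still touching enough tasks to pull in the available slaves before receiver placement.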