From 87b915f22105ced8b9cad2a1262a0fd26542ee4f Mon Sep 17 00:00:00 2001
From: Tathagata Das
Date: Tue, 31 Dec 2013 00:42:10 -0800
Subject: Removed extra empty lines.

---
 .../main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala  | 1 -
 .../scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala   | 1 -
 .../scala/org/apache/spark/streaming/scheduler/NetworkInputTracker.scala | 1 -
 3 files changed, 3 deletions(-)

diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
index fd69328aba..a2cd49c573 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
@@ -34,7 +34,6 @@ import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.dstream._
 
-
 /**
  * Input stream that pulls messages from a Kafka Broker.
  *
diff --git a/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala b/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
index 97e48ebeca..5cc721d7f9 100644
--- a/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
+++ b/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
@@ -98,4 +98,3 @@ class TwitterReceiver(
     logInfo("Twitter receiver stopped")
   }
 }
-
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/NetworkInputTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/NetworkInputTracker.scala
index 4a8e15db21..75f7244643 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/NetworkInputTracker.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/NetworkInputTracker.scala
@@ -164,7 +164,6 @@ class NetworkInputTracker(
       ssc.sparkContext.makeRDD(1 to 50, 50).map(x => (x, 1)).reduceByKey(_ + _, 20).collect()
     }
 
-
     // Distribute the receivers and start them
     ssc.sparkContext.runJob(tempRDD, startReceiver)
   }
--
cgit v1.2.3