author     Reynold Xin <rxin@databricks.com>  2015-05-28 23:00:02 -0700
committer  Reynold Xin <rxin@databricks.com>  2015-05-28 23:00:02 -0700
commit     97a60cf75d1fed654953eccedd04f3442389c5ca (patch)
tree       2b681949925a5acb86ff925ee2c86b9c2d6f867c /external
parent     36067ce398e2949c2f122625e67fd5497febdee6 (diff)
[SPARK-7929] Turn whitespace checker on for more token types.
This is the last batch of changes to complete SPARK-7929.

Previous related PRs:
https://github.com/apache/spark/pull/6480
https://github.com/apache/spark/pull/6478
https://github.com/apache/spark/pull/6477
https://github.com/apache/spark/pull/6476
https://github.com/apache/spark/pull/6475
https://github.com/apache/spark/pull/6474
https://github.com/apache/spark/pull/6473

Author: Reynold Xin <rxin@databricks.com>

Closes #6487 from rxin/whitespace-lint and squashes the following commits:

b33d43d [Reynold Xin] [SPARK-7929] Turn whitespace checker on for more token types.
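For context, the style rule behind this series requires a single space after tokens such as commas. An illustrative Scala sketch of what the checker flags and accepts (example code, not taken from this patch):

// Flagged: no space after the comma token.
val bad = Map("a" -> 1,"b" -> 2)

// Accepted: exactly one space after each comma.
val good = Map("a" -> 1, "b" -> 2)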
Diffstat (limited to 'external')
-rw-r--r--  external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala |  2
-rw-r--r--  external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala               |  2
-rw-r--r--  external/kafka/src/test/scala/org/apache/spark/streaming/kafka/KafkaRDDSuite.scala                  |  2
-rw-r--r--  external/mqtt/src/main/scala/org/apache/spark/streaming/mqtt/MQTTInputDStream.scala                 | 14
4 files changed, 4 insertions(+), 16 deletions(-)
diff --git a/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala b/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
index ea45b14294..7ad43b1d7b 100644
--- a/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
+++ b/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
@@ -143,7 +143,7 @@ private class TransactionProcessor(val channel: Channel, val seqNum: String,
         eventBatch.setErrorMsg(msg)
       } else {
         // At this point, the events are available, so fill them into the event batch
-        eventBatch = new EventBatch("",seqNum, events)
+        eventBatch = new EventBatch("", seqNum, events)
       }
     })
   } catch {
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
index dc629df4f4..65c49c1315 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
@@ -60,7 +60,7 @@ private[streaming] object EventTransformer extends Logging {
     out.write(body)
     val numHeaders = headers.size()
     out.writeInt(numHeaders)
-    for ((k,v) <- headers) {
+    for ((k, v) <- headers) {
       val keyBuff = Utils.serialize(k.toString)
       out.writeInt(keyBuff.length)
       out.write(keyBuff)
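The for ((k, v) <- headers) loop above writes each header as a length-prefixed key/value record. A self-contained sketch of that layout, substituting plain UTF-8 bytes for Spark's Utils.serialize (writeHeaders is a hypothetical helper, not part of EventTransformer):

import java.io.DataOutputStream

// Write a count, then each pair as length-prefixed key and value bytes.
def writeHeaders(out: DataOutputStream, headers: Map[String, String]): Unit = {
  out.writeInt(headers.size)
  for ((k, v) <- headers) {
    val keyBuff = k.getBytes("UTF-8")
    out.writeInt(keyBuff.length)
    out.write(keyBuff)
    val valBuff = v.getBytes("UTF-8")
    out.writeInt(valBuff.length)
    out.write(valBuff)
  }
}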
diff --git a/external/kafka/src/test/scala/org/apache/spark/streaming/kafka/KafkaRDDSuite.scala b/external/kafka/src/test/scala/org/apache/spark/streaming/kafka/KafkaRDDSuite.scala
index 39c3fb448f..3c875cb766 100644
--- a/external/kafka/src/test/scala/org/apache/spark/streaming/kafka/KafkaRDDSuite.scala
+++ b/external/kafka/src/test/scala/org/apache/spark/streaming/kafka/KafkaRDDSuite.scala
@@ -65,7 +65,7 @@ class KafkaRDDSuite extends FunSuite with BeforeAndAfterAll {
     val offsetRanges = Array(OffsetRange(topic, 0, 0, messages.size))
-    val rdd = KafkaUtils.createRDD[String, String, StringDecoder, StringDecoder](
+    val rdd = KafkaUtils.createRDD[String, String, StringDecoder, StringDecoder](
       sc, kafkaParams, offsetRanges)
     val received = rdd.map(_._2).collect.toSet
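For readers unfamiliar with the API under test: KafkaUtils.createRDD reads a fixed range of Kafka offsets as a batch RDD, with no receiver involved. A minimal usage sketch against this era's API; the broker address, topic name, and offset bounds are placeholders:

import kafka.serializer.StringDecoder
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.kafka.{KafkaUtils, OffsetRange}

val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("kafka-rdd-sketch"))
val kafkaParams = Map("metadata.broker.list" -> "localhost:9092")
// Read partition 0 of "my-topic" from offset 0 (inclusive) to 100 (exclusive).
val offsetRanges = Array(OffsetRange("my-topic", 0, 0, 100))
val rdd = KafkaUtils.createRDD[String, String, StringDecoder, StringDecoder](
  sc, kafkaParams, offsetRanges)
println(rdd.map(_._2).collect().toSet)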
diff --git a/external/mqtt/src/main/scala/org/apache/spark/streaming/mqtt/MQTTInputDStream.scala b/external/mqtt/src/main/scala/org/apache/spark/streaming/mqtt/MQTTInputDStream.scala
index 40f5f18547..7c2f18cb35 100644
--- a/external/mqtt/src/main/scala/org/apache/spark/streaming/mqtt/MQTTInputDStream.scala
+++ b/external/mqtt/src/main/scala/org/apache/spark/streaming/mqtt/MQTTInputDStream.scala
@@ -17,22 +17,10 @@
package org.apache.spark.streaming.mqtt
-import java.io.IOException
-import java.util.concurrent.Executors
-import java.util.Properties
-
-import scala.collection.JavaConversions._
-import scala.collection.Map
-import scala.collection.mutable.HashMap
-import scala.reflect.ClassTag
-
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken
import org.eclipse.paho.client.mqttv3.MqttCallback
import org.eclipse.paho.client.mqttv3.MqttClient
-import org.eclipse.paho.client.mqttv3.MqttClientPersistence
-import org.eclipse.paho.client.mqttv3.MqttException
import org.eclipse.paho.client.mqttv3.MqttMessage
-import org.eclipse.paho.client.mqttv3.MqttTopic
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.apache.spark.storage.StorageLevel
@@ -87,7 +75,7 @@ class MQTTReceiver(
     // Handles Mqtt message
     override def messageArrived(topic: String, message: MqttMessage) {
-      store(new String(message.getPayload(),"utf-8"))
+      store(new String(message.getPayload(), "utf-8"))
     }

     override def deliveryComplete(token: IMqttDeliveryToken) {
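The receiver above implements Paho's MqttCallback, whose messageArrived hook hands each payload to Spark via store(...). A standalone sketch of the same interface, with println standing in for store (PrintingCallback is a hypothetical name):

import org.eclipse.paho.client.mqttv3.{IMqttDeliveryToken, MqttCallback, MqttMessage}

// Minimal MqttCallback: decode each payload as UTF-8 and print it.
class PrintingCallback extends MqttCallback {
  override def messageArrived(topic: String, message: MqttMessage): Unit =
    println(new String(message.getPayload(), "utf-8"))
  override def deliveryComplete(token: IMqttDeliveryToken): Unit = ()
  override def connectionLost(cause: Throwable): Unit = ()
}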