 core/src/main/scala/org/apache/spark/network/Connection.scala                                    | 12
 core/src/main/scala/org/apache/spark/network/ConnectionManager.scala                             | 14
 core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala                         |  4
 core/src/main/scala/org/apache/spark/network/ReceiverTest.scala                                  |  2
 core/src/main/scala/org/apache/spark/network/SenderTest.scala                                    |  2
 core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala                                     |  2
 core/src/main/scala/org/apache/spark/util/MutablePair.scala                                      |  4
 examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala  |  2
 external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala           |  2
 graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala                      |  2
 project/project/SparkPluginBuild.scala                                                           |  3
 project/spark-style/src/main/scala/org/apache/spark/scalastyle/{SparkSpaceAfterCommentStyleCheck.scala => SparkSpaceAfterCommentStartChecker.scala} | 8
 sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala                       |  6
 13 files changed, 32 insertions(+), 31 deletions(-)
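
This commit normalizes block-comment style across the tree: a single space after the opening /* and before the closing */. A minimal illustration of what the updated style rule accepts and rejects (hypothetical snippet, not taken from the patch):

    /* messages += message*/     // rejected: no space before the closing delimiter
    /* messages += message */    // accepted

The per-file edits below are mechanical; the substantive change is the updated Scalastyle checker near the end of the diff.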
diff --git a/core/src/main/scala/org/apache/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
index 16bd00fd18..2f7576c53b 100644
--- a/core/src/main/scala/org/apache/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -211,7 +211,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
def addMessage(message: Message) {
messages.synchronized{
- /* messages += message*/
+ /* messages += message */
messages.enqueue(message)
logDebug("Added [" + message + "] to outbox for sending to " +
"[" + getRemoteConnectionManagerId() + "]")
@@ -222,7 +222,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
messages.synchronized {
while (!messages.isEmpty) {
/* nextMessageToBeUsed = nextMessageToBeUsed % messages.size */
- /* val message = messages(nextMessageToBeUsed)*/
+ /* val message = messages(nextMessageToBeUsed) */
val message = messages.dequeue
val chunk = message.getChunkForSending(defaultChunkSize)
if (chunk.isDefined) {
@@ -262,7 +262,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
val currentBuffers = new ArrayBuffer[ByteBuffer]()
- /* channel.socket.setSendBufferSize(256 * 1024)*/
+ /* channel.socket.setSendBufferSize(256 * 1024) */
override def getRemoteAddress() = address
@@ -355,7 +355,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
}
case None => {
// changeConnectionKeyInterest(0)
- /* key.interestOps(0)*/
+ /* key.interestOps(0) */
return false
}
}
@@ -540,10 +540,10 @@ private[spark] class ReceivingConnection(
return false
}
- /* logDebug("Read " + bytesRead + " bytes for the buffer")*/
+ /* logDebug("Read " + bytesRead + " bytes for the buffer") */
if (currentChunk.buffer.remaining == 0) {
- /* println("Filled buffer at " + System.currentTimeMillis)*/
+ /* println("Filled buffer at " + System.currentTimeMillis) */
val bufferMessage = inbox.getMessageForChunk(currentChunk).get
if (bufferMessage.isCompletelyReceived) {
bufferMessage.flip
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index 2682f9d0ed..6b0a972f0b 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -505,7 +505,7 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf,
}
}
handleMessageExecutor.execute(runnable)
- /* handleMessage(connection, message)*/
+ /* handleMessage(connection, message) */
}
private def handleClientAuthentication(
@@ -859,14 +859,14 @@ private[spark] object ConnectionManager {
None
})
- /* testSequentialSending(manager)*/
- /* System.gc()*/
+ /* testSequentialSending(manager) */
+ /* System.gc() */
- /* testParallelSending(manager)*/
- /* System.gc()*/
+ /* testParallelSending(manager) */
+ /* System.gc() */
- /* testParallelDecreasingSending(manager)*/
- /* System.gc()*/
+ /* testParallelDecreasingSending(manager) */
+ /* System.gc() */
testContinuousSending(manager)
System.gc()
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index e5745d7daa..9d9b9dbdd5 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -47,8 +47,8 @@ private[spark] object ConnectionManagerTest extends Logging{
val slaves = slavesFile.mkString.split("\n")
slavesFile.close()
- /* println("Slaves")*/
- /* slaves.foreach(println)*/
+ /* println("Slaves") */
+ /* slaves.foreach(println) */
val tasknum = if (args.length > 2) args(2).toInt else slaves.length
val size = ( if (args.length > 3) (args(3).toInt) else 10 ) * 1024 * 1024
val count = if (args.length > 4) args(4).toInt else 3
diff --git a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 17fd931c9f..2b41c403b2 100644
--- a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -27,7 +27,7 @@ private[spark] object ReceiverTest {
println("Started connection manager with id = " + manager.id)
manager.onReceiveMessage((msg: Message, id: ConnectionManagerId) => {
- /* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis)*/
+ /* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis) */
val buffer = ByteBuffer.wrap("response".getBytes)
Some(Message.createBufferMessage(buffer, msg.id))
})
diff --git a/core/src/main/scala/org/apache/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 905eddfbb9..14c094c617 100644
--- a/core/src/main/scala/org/apache/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -50,7 +50,7 @@ private[spark] object SenderTest {
(0 until count).foreach(i => {
val dataMessage = Message.createBufferMessage(buffer.duplicate)
val startTime = System.currentTimeMillis
- /* println("Started timer at " + startTime)*/
+ /* println("Started timer at " + startTime) */
val responseStr = manager.sendMessageReliablySync(targetConnectionManagerId, dataMessage)
.map { response =>
val buffer = response.asInstanceOf[BufferMessage].buffers(0)
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
index f3c93d4214..70d62b66a4 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.Schedulable
import org.apache.spark.ui.Page._
import org.apache.spark.ui.UIUtils
-/** Page showing list of all ongoing and recently finished stages and pools*/
+/** Page showing list of all ongoing and recently finished stages and pools */
private[ui] class IndexPage(parent: JobProgressUI) {
private val appName = parent.appName
private val basePath = parent.basePath
diff --git a/core/src/main/scala/org/apache/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
index a898824cff..a6b39247a5 100644
--- a/core/src/main/scala/org/apache/spark/util/MutablePair.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -24,8 +24,8 @@ package org.apache.spark.util
* @param _1 Element 1 of this MutablePair
* @param _2 Element 2 of this MutablePair
*/
-case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T1,
- @specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T2]
+case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T1,
+ @specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T2]
(var _1: T1, var _2: T2)
extends Product2[T1, T2]
{
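
For context on the hunk above: the AnyRef entries in the @specialized lists are commented out, so the change only fixes spacing inside those comments. A brief sketch of what @specialized buys here (standard Scala 2.x behavior, not part of this patch):

    // The compiler emits primitive variants of MutablePair, so the Int
    // fields below are stored unboxed rather than as java.lang.Object.
    val p = new MutablePair[Int, Int](1, 2)
    p._1 = 3  // mutates the unboxed Int field; no boxing allocation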
diff --git a/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
index 0ac46c31c2..251f65fe4d 100644
--- a/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
@@ -21,7 +21,7 @@ import java.net.ServerSocket
import java.io.PrintWriter
import util.Random
-/** Represents a page view on a website with associated dimension data.*/
+/** Represents a page view on a website with associated dimension data. */
class PageView(val url : String, val status : Int, val zipCode : Int, val userID : Int)
extends Serializable {
override def toString() : String = {
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
index ce3ef47cfe..34012b846e 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
@@ -127,7 +127,7 @@ class FlumeEventServer(receiver : FlumeReceiver) extends AvroSourceProtocol {
}
/** A NetworkReceiver which listens for events using the
- * Flume Avro interface.*/
+ * Flume Avro interface. */
private[streaming]
class FlumeReceiver(
host: String,
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala
index bebe3740bc..9d4f3750cb 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala
@@ -45,7 +45,7 @@ class VertexBroadcastMsg[@specialized(Int, Long, Double, Boolean) T](
* @param data value to send
*/
private[graphx]
-class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T](
+class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T](
@transient var partition: PartitionID,
var data: T)
extends Product2[PartitionID, T] with Serializable {
diff --git a/project/project/SparkPluginBuild.scala b/project/project/SparkPluginBuild.scala
index 43361aa2b4..5a307044ba 100644
--- a/project/project/SparkPluginBuild.scala
+++ b/project/project/SparkPluginBuild.scala
@@ -34,8 +34,7 @@ object SparkPluginDef extends Build {
version := sparkVersion,
scalaVersion := "2.10.3",
scalacOptions := Seq("-unchecked", "-deprecation"),
- libraryDependencies ++= Dependencies.scalaStyle,
- sbtPlugin := true
+ libraryDependencies ++= Dependencies.scalaStyle
)
object Dependencies {
diff --git a/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStyleCheck.scala b/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStartChecker.scala
index 2f3c1a1828..80d3faa3fe 100644
--- a/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStyleCheck.scala
+++ b/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStartChecker.scala
@@ -25,13 +25,15 @@ import scalariform.lexer.{MultiLineComment, ScalaDocComment, SingleLineComment, Token}
import scalariform.parser.CompilationUnit
class SparkSpaceAfterCommentStartChecker extends ScalariformChecker {
- val errorKey: String = "insert.a.single.space.after.comment.start"
+ val errorKey: String = "insert.a.single.space.after.comment.start.and.before.end"
private def multiLineCommentRegex(comment: Token) =
- Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+ Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+ Pattern.compile( """/\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()
private def scalaDocPatternRegex(comment: Token) =
- Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+ Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+ Pattern.compile( """/\*\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()
private def singleLineCommentRegex(comment: Token): Boolean =
comment.text.trim.matches( """//\S+.*""") && !comment.text.trim.matches( """///+""")
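
The checker now flags missing spaces on both ends of a block comment: the pre-existing pattern /\*\S+.* catches a non-space right after the comment start, and the new pattern /\*.*\S\*/ catches a non-space right before the comment end, which is what drives all the comment edits above. A self-contained sketch of the end-of-comment check (plain java.util.regex; the ScalariformChecker plumbing is elided):

    import java.util.regex.Pattern

    object SpaceBeforeCommentEndDemo {
      // DOTALL lets .* span the newlines inside a multi-line comment.
      private val badEnd = Pattern.compile("""/\*.*\S\*/""", Pattern.DOTALL)

      def flagged(comment: String): Boolean =
        badEnd.matcher(comment.trim).matches()

      def main(args: Array[String]): Unit = {
        println(flagged("/* messages += message*/"))   // true  -> would be reported
        println(flagged("/* messages += message */"))  // false -> passes the rule
      }
    }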
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
index 67a34e1f21..4ab755c096 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
@@ -57,19 +57,19 @@ import org.apache.spark.sql.catalyst.types._
case class ParquetRelation(tableName: String, path: String)
extends BaseRelation with MultiInstanceRelation {
- /** Schema derived from ParquetFile **/
+ /** Schema derived from ParquetFile */
def parquetSchema: MessageType =
ParquetTypesConverter
.readMetaData(new Path(path))
.getFileMetaData
.getSchema
- /** Attributes **/
+ /** Attributes */
val attributes =
ParquetTypesConverter
.convertToAttributes(parquetSchema)
- /** Output **/
+ /** Output */
override val output = attributes
// Parquet files have no concepts of keys, therefore no Partitioner