author     Reynold Xin <rxin@databricks.com>  2015-04-03 01:25:02 -0700
committer  Reynold Xin <rxin@databricks.com>  2015-04-03 01:25:02 -0700
commit     82701ee25fda64f03899713bc56f82ca6f278151 (patch)
tree       07fba36d66228f7561bd65dd502fd668d50a9be5 /examples
parent     c42c3fc7f7b79a1f6ce990d39b5d9d14ab19fcf0 (diff)
[SPARK-6428] Turn on explicit type checking for public methods.
This builds on my earlier pull requests and turns on explicit type checking in scalastyle.

Author: Reynold Xin <rxin@databricks.com>

Closes #5342 from rxin/SPARK-6428 and squashes the following commits:

7b531ab [Reynold Xin] import ordering
2d9a8a5 [Reynold Xin] jl
e668b1c [Reynold Xin] override
9b9e119 [Reynold Xin] Parenthesis.
82e0cf5 [Reynold Xin] [SPARK-6428] Turn on explicit type checking for public methods.
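For context (illustrative, not part of the commit): without an explicit result type, the Scala compiler infers one from the method body, so an implementation detail becomes part of the public contract and can change silently whenever the body is edited. A minimal sketch of the failure mode, with hypothetical names:

    import scala.collection.mutable.ArrayBuffer

    object Example {
      // Inferred result type: ArrayBuffer[Int]. The mutable implementation
      // detail leaks into the public API, and rewriting the body to build a
      // List would silently change the method's signature.
      def idsInferred = ArrayBuffer(1, 2, 3)

      // Explicit result type: the contract stays Seq[Int] however the value
      // is constructed, which is the discipline the style check enforces
      // for public members.
      def ids: Seq[Int] = ArrayBuffer(1, 2, 3)
    }

Assuming Spark's standard scalastyle setup, the check runs as part of the sbt scalastyle task and fails the build when a public member omits its result type.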
Diffstat (limited to 'examples')
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalLR.scala | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LogQuery.scala | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkLR.scala | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkTC.scala | 2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala | 2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/streaming/ActorWordCount.scala | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/streaming/RecoverableNetworkWordCount.scala | 3
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/streaming/ZeroMQWordCount.scala | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/streaming/clickstream/PageViewGenerator.scala | 2
11 files changed, 22 insertions, 19 deletions
diff --git a/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
index 17624c20cf..f73eac1e2b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
@@ -40,8 +40,8 @@ object LocalKMeans {
val convergeDist = 0.001
val rand = new Random(42)
- def generateData = {
- def generatePoint(i: Int) = {
+ def generateData: Array[DenseVector[Double]] = {
+ def generatePoint(i: Int): DenseVector[Double] = {
DenseVector.fill(D){rand.nextDouble * R}
}
Array.tabulate(N)(generatePoint)
diff --git a/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala b/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
index 92a683ad57..a55e0dc8d3 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
@@ -37,8 +37,8 @@ object LocalLR {
case class DataPoint(x: Vector[Double], y: Double)
- def generateData = {
- def generatePoint(i: Int) = {
+ def generateData: Array[DataPoint] = {
+ def generatePoint(i: Int): DataPoint = {
val y = if(i % 2 == 0) -1 else 1
val x = DenseVector.fill(D){rand.nextGaussian + y * R}
DataPoint(x, y)
diff --git a/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
index 74620ad007..32e02eab8b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
@@ -54,8 +54,8 @@ object LogQuery {
// scalastyle:on
/** Tracks the total query count and number of aggregate bytes for a particular group. */
class Stats(val count: Int, val numBytes: Int) extends Serializable {
- def merge(other: Stats) = new Stats(count + other.count, numBytes + other.numBytes)
- override def toString = "bytes=%s\tn=%s".format(numBytes, count)
+ def merge(other: Stats): Stats = new Stats(count + other.count, numBytes + other.numBytes)
+ override def toString: String = "bytes=%s\tn=%s".format(numBytes, count)
}
def extractKey(line: String): (String, String, String) = {
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
index 257a7d29f9..8c01a60844 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
@@ -42,8 +42,8 @@ object SparkLR {
case class DataPoint(x: Vector[Double], y: Double)
- def generateData = {
- def generatePoint(i: Int) = {
+ def generateData: Array[DataPoint] = {
+ def generatePoint(i: Int): DataPoint = {
val y = if(i % 2 == 0) -1 else 1
val x = DenseVector.fill(D){rand.nextGaussian + y * R}
DataPoint(x, y)
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
index f7f83086df..772cd897f5 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
@@ -31,7 +31,7 @@ object SparkTC {
val numVertices = 100
val rand = new Random(42)
- def generateGraph = {
+ def generateGraph: Seq[(Int, Int)] = {
val edges: mutable.Set[(Int, Int)] = mutable.Set.empty
while (edges.size < numEdges) {
val from = rand.nextInt(numVertices)
diff --git a/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
index e322d4ce5a..ab6e63deb3 100644
--- a/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
@@ -90,7 +90,7 @@ class PRMessage() extends Message[String] with Serializable {
}
class CustomPartitioner(partitions: Int) extends Partitioner {
- def numPartitions = partitions
+ def numPartitions: Int = partitions
def getPartition(key: Any): Int = {
val hash = key match {
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
index 1f4ca4fbe7..0bc36ea65e 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
@@ -178,7 +178,9 @@ object MovieLensALS {
def computeRmse(model: MatrixFactorizationModel, data: RDD[Rating], implicitPrefs: Boolean)
: Double = {
- def mapPredictedRating(r: Double) = if (implicitPrefs) math.max(math.min(r, 1.0), 0.0) else r
+ def mapPredictedRating(r: Double): Double = {
+ if (implicitPrefs) math.max(math.min(r, 1.0), 0.0) else r
+ }
val predictions: RDD[Rating] = model.predict(data.map(x => (x.user, x.product)))
val predictionsAndRatings = predictions.map{ x =>
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/ActorWordCount.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/ActorWordCount.scala
index b433082dce..92867b44be 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/ActorWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/ActorWordCount.scala
@@ -85,13 +85,13 @@ extends Actor with ActorHelper {
lazy private val remotePublisher = context.actorSelection(urlOfPublisher)
- override def preStart = remotePublisher ! SubscribeReceiver(context.self)
+ override def preStart(): Unit = remotePublisher ! SubscribeReceiver(context.self)
- def receive = {
+ def receive: PartialFunction[Any, Unit] = {
case msg => store(msg.asInstanceOf[T])
}
- override def postStop() = remotePublisher ! UnsubscribeReceiver(context.self)
+ override def postStop(): Unit = remotePublisher ! UnsubscribeReceiver(context.self)
}
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/RecoverableNetworkWordCount.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/RecoverableNetworkWordCount.scala
index c3a05c89d8..751b30ea15 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/RecoverableNetworkWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/RecoverableNetworkWordCount.scala
@@ -55,7 +55,8 @@ import org.apache.spark.util.IntParam
*/
object RecoverableNetworkWordCount {
- def createContext(ip: String, port: Int, outputPath: String, checkpointDirectory: String) = {
+ def createContext(ip: String, port: Int, outputPath: String, checkpointDirectory: String)
+ : StreamingContext = {
// If you do not see this printed, that means the StreamingContext has been loaded
// from the new checkpoint
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/ZeroMQWordCount.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/ZeroMQWordCount.scala
index 6510c70bd1..e99d1baa72 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/ZeroMQWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/ZeroMQWordCount.scala
@@ -35,7 +35,7 @@ import org.apache.spark.SparkConf
*/
object SimpleZeroMQPublisher {
- def main(args: Array[String]) = {
+ def main(args: Array[String]): Unit = {
if (args.length < 2) {
System.err.println("Usage: SimpleZeroMQPublisher <zeroMQUrl> <topic> ")
System.exit(1)
@@ -45,7 +45,7 @@ object SimpleZeroMQPublisher {
val acs: ActorSystem = ActorSystem()
val pubSocket = ZeroMQExtension(acs).newSocket(SocketType.Pub, Bind(url))
- implicit def stringToByteString(x: String) = ByteString(x)
+ implicit def stringToByteString(x: String): ByteString = ByteString(x)
val messages: List[ByteString] = List("words ", "may ", "count ")
while (true) {
Thread.sleep(1000)
@@ -86,7 +86,7 @@ object ZeroMQWordCount {
// Create the context and set the batch size
val ssc = new StreamingContext(sparkConf, Seconds(2))
- def bytesToStringIterator(x: Seq[ByteString]) = (x.map(_.utf8String)).iterator
+ def bytesToStringIterator(x: Seq[ByteString]): Iterator[String] = x.map(_.utf8String).iterator
// For this stream, a zeroMQ publisher should be running.
val lines = ZeroMQUtils.createStream(ssc, url, Subscribe(topic), bytesToStringIterator _)
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/clickstream/PageViewGenerator.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/clickstream/PageViewGenerator.scala
index 8402491b62..54d996b8ac 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/clickstream/PageViewGenerator.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/clickstream/PageViewGenerator.scala
@@ -94,7 +94,7 @@ object PageViewGenerator {
while (true) {
val socket = listener.accept()
new Thread() {
- override def run = {
+ override def run(): Unit = {
println("Got client connected from: " + socket.getInetAddress)
val out = new PrintWriter(socket.getOutputStream(), true)