author     Jey Kottalam <jey@cs.berkeley.edu>  2013-06-20 12:49:10 -0400
committer  Jey Kottalam <jey@cs.berkeley.edu>  2013-06-21 12:14:16 -0400
commit     1ba3c173034c37ef99fc312c84943d2ab8885670 (patch)
tree       e6fd941743b43ef801e1a9b4d64fab06917a4265 /core/src
parent     7c5ff733ee1d3729b4b26f7c5542ca00c4d64139 (diff)
use parens when calling method with side-effects
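
The convention applied here: in Scala, a method that performs a side effect is
conventionally declared with an empty parameter list and invoked with parentheses,
while a pure accessor is written without them. A minimal illustrative sketch
(not part of this commit; the Counter class and its members are made-up names):

    class Counter {
      private var n = 0
      def increment(): Unit = { n += 1 }   // side-effecting: declared and called with ()
      def value: Int = n                   // pure accessor: no parens
    }

    val c = new Counter
    c.increment()     // parens signal "this mutates state"
    println(c.value)  // no parens: just reads a value

The commit applies that rule to calls such as stream.readInt() and startDaemon(),
which consume input or spawn a process and therefore take ().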
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/spark/SparkEnv.scala                        |  2
-rw-r--r--  core/src/main/scala/spark/api/python/PythonRDD.scala            |  4
-rw-r--r--  core/src/main/scala/spark/api/python/PythonWorkerFactory.scala  | 14
3 files changed, 10 insertions, 10 deletions
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/spark/SparkEnv.scala
index 0a23c45658..7ccde2e818 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/spark/SparkEnv.scala
@@ -59,7 +59,7 @@ class SparkEnv (
def createPythonWorker(pythonExec: String, envVars: Map[String, String]): java.net.Socket = {
synchronized {
- pythonWorkers.getOrElseUpdate((pythonExec, envVars), new PythonWorkerFactory(pythonExec, envVars)).create
+ pythonWorkers.getOrElseUpdate((pythonExec, envVars), new PythonWorkerFactory(pythonExec, envVars)).create()
}
}
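
For context, createPythonWorker keys a cache of worker factories by
(pythonExec, envVars), so getOrElseUpdate builds a factory only on the first
request for a given key. A simplified sketch of that pattern, assuming
PythonWorkerFactory is on the classpath (the WorkerCache class and workerFor
method are illustrative names, not the actual SparkEnv members):

    import scala.collection.mutable

    // Hypothetical simplified holder; the real cache lives in SparkEnv as pythonWorkers
    class WorkerCache {
      private val factories =
        mutable.Map[(String, Map[String, String]), PythonWorkerFactory]()

      def workerFor(exec: String, env: Map[String, String]): java.net.Socket = synchronized {
        // create() may start a daemon process, so it is a side-effecting call with parens
        factories.getOrElseUpdate((exec, env), new PythonWorkerFactory(exec, env)).create()
      }
    }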
diff --git a/core/src/main/scala/spark/api/python/PythonRDD.scala b/core/src/main/scala/spark/api/python/PythonRDD.scala
index 3c48071b3f..63140cf37f 100644
--- a/core/src/main/scala/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/spark/api/python/PythonRDD.scala
@@ -116,12 +116,12 @@ private[spark] class PythonRDD[T: ClassManifest](
// We've finished the data section of the output, but we can still
// read some accumulator updates; let's do that, breaking when we
// get a negative length record.
- var len2 = stream.readInt
+ var len2 = stream.readInt()
while (len2 >= 0) {
val update = new Array[Byte](len2)
stream.readFully(update)
accumulator += Collections.singletonList(update)
- len2 = stream.readInt
+ len2 = stream.readInt()
}
new Array[Byte](0)
}
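
The hunk above reads accumulator updates in a length-prefixed framing scheme:
each update is preceded by its byte length, and a negative length marks the end
of the stream. A standalone sketch of that read loop (illustrative only;
readFrames is a made-up name, using a plain DataInputStream):

    import java.io.DataInputStream

    // Frame format: [length: Int][length bytes] ... terminated by a negative length
    def readFrames(stream: DataInputStream): Seq[Array[Byte]] = {
      val frames = scala.collection.mutable.ArrayBuffer[Array[Byte]]()
      var len = stream.readInt()   // readInt() consumes bytes from the stream, hence ()
      while (len >= 0) {
        val buf = new Array[Byte](len)
        stream.readFully(buf)
        frames += buf
        len = stream.readInt()
      }
      frames.toSeq
    }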
diff --git a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
index ebbd226b3e..8844411d73 100644
--- a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
@@ -16,7 +16,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
def create(): Socket = {
synchronized {
// Start the daemon if it hasn't been started
- startDaemon
+ startDaemon()
// Attempt to connect, restart and retry once if it fails
try {
@@ -24,8 +24,8 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
} catch {
case exc: SocketException => {
logWarning("Python daemon unexpectedly quit, attempting to restart")
- stopDaemon
- startDaemon
+ stopDaemon()
+ startDaemon()
new Socket(daemonHost, daemonPort)
}
case e => throw e
@@ -34,7 +34,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
}
def stop() {
- stopDaemon
+ stopDaemon()
}
private def startDaemon() {
@@ -51,7 +51,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
val workerEnv = pb.environment()
workerEnv.putAll(envVars)
daemon = pb.start()
- daemonPort = new DataInputStream(daemon.getInputStream).readInt
+ daemonPort = new DataInputStream(daemon.getInputStream).readInt()
// Redirect the stderr to ours
new Thread("stderr reader for " + pythonExec) {
@@ -71,7 +71,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
}.start()
} catch {
case e => {
- stopDaemon
+ stopDaemon()
throw e
}
}
@@ -85,7 +85,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
synchronized {
// Request shutdown of existing daemon by sending SIGTERM
if (daemon != null) {
- daemon.destroy
+ daemon.destroy()
}
daemon = null