From 1ba3c173034c37ef99fc312c84943d2ab8885670 Mon Sep 17 00:00:00 2001
From: Jey Kottalam
Date: Thu, 20 Jun 2013 12:49:10 -0400
Subject: [PATCH] use parens when calling method with side-effects

---
 core/src/main/scala/spark/SparkEnv.scala             |  2 +-
 .../main/scala/spark/api/python/PythonRDD.scala      |  4 ++--
 .../spark/api/python/PythonWorkerFactory.scala       | 14 +++++++-------
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/spark/SparkEnv.scala
index 0a23c45658..7ccde2e818 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/spark/SparkEnv.scala
@@ -59,7 +59,7 @@ class SparkEnv (
 
   def createPythonWorker(pythonExec: String, envVars: Map[String, String]): java.net.Socket = {
     synchronized {
-      pythonWorkers.getOrElseUpdate((pythonExec, envVars), new PythonWorkerFactory(pythonExec, envVars)).create
+      pythonWorkers.getOrElseUpdate((pythonExec, envVars), new PythonWorkerFactory(pythonExec, envVars)).create()
     }
   }
 
diff --git a/core/src/main/scala/spark/api/python/PythonRDD.scala b/core/src/main/scala/spark/api/python/PythonRDD.scala
index 3c48071b3f..63140cf37f 100644
--- a/core/src/main/scala/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/spark/api/python/PythonRDD.scala
@@ -116,12 +116,12 @@ private[spark] class PythonRDD[T: ClassManifest](
           // We've finished the data section of the output, but we can still
           // read some accumulator updates; let's do that, breaking when we
           // get a negative length record.
-          var len2 = stream.readInt
+          var len2 = stream.readInt()
           while (len2 >= 0) {
             val update = new Array[Byte](len2)
             stream.readFully(update)
             accumulator += Collections.singletonList(update)
-            len2 = stream.readInt
+            len2 = stream.readInt()
           }
           new Array[Byte](0)
         }
diff --git a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
index ebbd226b3e..8844411d73 100644
--- a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
@@ -16,7 +16,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
   def create(): Socket = {
     synchronized {
       // Start the daemon if it hasn't been started
-      startDaemon
+      startDaemon()
 
       // Attempt to connect, restart and retry once if it fails
       try {
@@ -24,8 +24,8 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
       } catch {
         case exc: SocketException => {
           logWarning("Python daemon unexpectedly quit, attempting to restart")
-          stopDaemon
-          startDaemon
+          stopDaemon()
+          startDaemon()
           new Socket(daemonHost, daemonPort)
         }
         case e => throw e
@@ -34,7 +34,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
     }
   }
 
   def stop() {
-    stopDaemon
+    stopDaemon()
   }
 
   private def startDaemon() {
@@ -51,7 +51,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
       val workerEnv = pb.environment()
       workerEnv.putAll(envVars)
       daemon = pb.start()
-      daemonPort = new DataInputStream(daemon.getInputStream).readInt
+      daemonPort = new DataInputStream(daemon.getInputStream).readInt()
 
       // Redirect the stderr to ours
       new Thread("stderr reader for " + pythonExec) {
@@ -71,7 +71,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
       }.start()
     } catch {
       case e => {
-        stopDaemon
+        stopDaemon()
         throw e
       }
     }
@@ -85,7 +85,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
     synchronized {
       // Request shutdown of existing daemon by sending SIGTERM
      if (daemon != null) {
-        daemon.destroy
+        daemon.destroy()
      }
      daemon = null
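
The convention being applied: in Scala, a parameterless method that performs a side effect is conventionally declared and invoked with empty parentheses, while a pure accessor omits them. A minimal standalone sketch of the idea (Counter, increment, and value are illustrative names, not taken from the Spark codebase):

    // Counter is a hypothetical class used only to illustrate the convention.
    class Counter {
      private var n = 0

      // Side-effecting method: declared (and called) with empty parens.
      def increment(): Unit = { n += 1 }

      // Pure accessor: no parens, reads state without changing it.
      def value: Int = n
    }

    object Demo {
      def main(args: Array[String]): Unit = {
        val c = new Counter
        c.increment()     // parens signal "this does something"
        println(c.value)  // no parens: just a read; prints 1
      }
    }

This mirrors the patch: create(), startDaemon(), stopDaemon(), readInt(), and destroy() all mutate state or perform I/O, so their call sites now spell out the parentheses.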