diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala b/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
index 6c40722725..11f2432575 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
@@ -32,6 +32,8 @@ import org.apache.spark.util.Utils
  * This process is launched (via SparkSubmit) by the PySpark driver (see java_gateway.py).
  */
 private[spark] object PythonGatewayServer extends Logging {
+  initializeLogIfNecessary(true)
+
   def main(args: Array[String]): Unit = Utils.tryOrExit {
     // Start a GatewayServer on an ephemeral port
     val gatewayServer: GatewayServer = new GatewayServer(null, 0)
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala b/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
index 9d29a84413..41d0a85ee3 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
@@ -94,6 +94,8 @@ private[spark] class RBackend {
 }
 
 private[spark] object RBackend extends Logging {
+  initializeLogIfNecessary(true)
+
   def main(args: Array[String]): Unit = {
     if (args.length < 1) {
       // scalastyle:off println
@@ -101,6 +103,7 @@ private[spark] object RBackend extends Logging {
       // scalastyle:on println
       System.exit(-1)
     }
+
     val sparkRBackend = new RBackend()
     try {
       // bind to random port
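
Both hunks apply the same pattern: a statement written directly in a Scala object's body runs in the object's constructor, so initializeLogIfNecessary(true) executes before the first log call in main. The sketch below is a minimal, self-contained illustration of that pattern under stated assumptions; SimpleLogging and GatewayLikeServer are hypothetical stand-ins and are not Spark's actual Logging trait or classes.

// Illustrative only: SimpleLogging and GatewayLikeServer are not part of Spark.
trait SimpleLogging {
  @volatile private var initialized = false

  // Rough stand-in for a one-time logging setup hook such as the one the patch calls.
  protected def initializeLogIfNecessary(isInterpreter: Boolean): Unit = {
    if (!initialized) {
      initialized = true
      // Real code would configure the logging backend here; println keeps the sketch runnable.
      println(s"logging initialized (isInterpreter = $isInterpreter)")
    }
  }

  protected def logInfo(msg: => String): Unit = println(s"INFO $msg")
}

object GatewayLikeServer extends SimpleLogging {
  // Runs in the object's constructor, i.e. before main's body executes.
  initializeLogIfNecessary(true)

  def main(args: Array[String]): Unit = {
    logInfo("starting server on an ephemeral port")
  }
}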