From 6e31ccf2a196881c7b4ffbe0afba7d93a7f2875c Mon Sep 17 00:00:00 2001
From: Hyukjin Kwon
Date: Wed, 27 Feb 2019 17:01:30 -0800
Subject: [PATCH] [SPARK-26895][CORE][FOLLOW-UP] Uninitializing log after
 `prepareSubmitEnvironment` in SparkSubmit

## What changes were proposed in this pull request?

Currently, running `spark-shell` locally shows logs like the below:

```
$ ./bin/spark-shell
...
19/02/28 04:42:43 INFO SecurityManager: Changing view acls to: hkwon
19/02/28 04:42:43 INFO SecurityManager: Changing modify acls to: hkwon
19/02/28 04:42:43 INFO SecurityManager: Changing view acls groups to:
19/02/28 04:42:43 INFO SecurityManager: Changing modify acls groups to:
19/02/28 04:42:43 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(hkwon); groups with view permissions: Set(); users with modify permissions: Set(hkwon); groups with modify permissions: Set()
19/02/28 04:42:43 INFO SignalUtils: Registered signal handler for INT
19/02/28 04:42:48 INFO SparkContext: Running Spark version 3.0.0-SNAPSHOT
19/02/28 04:42:48 INFO SparkContext: Submitted application: Spark shell
19/02/28 04:42:48 INFO SecurityManager: Changing view acls to: hkwon
```

The cause seems to be https://github.com/apache/spark/pull/23806: `prepareSubmitEnvironment` reinitializes the logging system, so uninitializing it beforehand has no effect.

This PR proposes to uninitialize the logging system after `prepareSubmitEnvironment` instead.

## How was this patch tested?

Manually tested.

Closes #23911 from HyukjinKwon/SPARK-26895.

Authored-by: Hyukjin Kwon
Signed-off-by: Marcelo Vanzin
---
 .../org/apache/spark/deploy/SparkSubmit.scala | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 2843bd5b33..45ad7b391b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -144,7 +144,7 @@ private[spark] class SparkSubmit extends Logging {
         try {
           proxyUser.doAs(new PrivilegedExceptionAction[Unit]() {
             override def run(): Unit = {
-              runMain(args)
+              runMain(args, uninitLog)
             }
           })
         } catch {
@@ -159,15 +159,10 @@ private[spark] class SparkSubmit extends Logging {
             }
         }
       } else {
-        runMain(args)
+        runMain(args, uninitLog)
       }
     }
 
-    // Let the main class re-initialize the logging system once it starts.
-    if (uninitLog) {
-      Logging.uninitialize()
-    }
-
     // In standalone cluster mode, there are two submission gateways:
     //   (1) The traditional RPC gateway using o.a.s.deploy.Client as a wrapper
     //   (2) The new REST-based gateway introduced in Spark 1.3
@@ -777,8 +772,13 @@ private[spark] class SparkSubmit extends Logging {
    * Note that this main class will not be the one provided by the user if we're
    * running cluster deploy mode or python applications.
    */
-  private def runMain(args: SparkSubmitArguments): Unit = {
+  private def runMain(args: SparkSubmitArguments, uninitLog: Boolean): Unit = {
     val (childArgs, childClasspath, sparkConf, childMainClass) = prepareSubmitEnvironment(args)
+    // Let the main class re-initialize the logging system once it starts.
+    if (uninitLog) {
+      Logging.uninitialize()
+    }
+
     if (args.verbose) {
       logInfo(s"Main class:\n$childMainClass")
       logInfo(s"Arguments:\n${childArgs.mkString("\n")}")
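
To see the ordering issue the patch fixes in isolation, below is a minimal, self-contained Scala sketch. It is not Spark code: `ToyLogging`, `prepareEnv`, and `OrderingDemo` are hypothetical stand-ins for `Logging`, `prepareSubmitEnvironment`, and the submit path, modelling only the initialize/uninitialize ordering that this patch changes.

```scala
// Toy stand-in for the global logging state held by Spark's Logging trait.
object ToyLogging {
  var initialized = false
  def initialize(): Unit = initialized = true
  def uninitialize(): Unit = initialized = false
}

object OrderingDemo {
  // Stand-in for prepareSubmitEnvironment, which logs and therefore
  // (re)initializes the logging system as a side effect.
  def prepareEnv(): Unit = ToyLogging.initialize()

  def main(args: Array[String]): Unit = {
    // Old ordering: uninitialize first, then prepare the environment.
    // Logging ends up initialized again before the child main class runs.
    ToyLogging.uninitialize()
    prepareEnv()
    println(s"old ordering: initialized = ${ToyLogging.initialized}") // true

    // New ordering (this patch): prepare first, then uninitialize,
    // leaving the child main class free to set up logging itself.
    prepareEnv()
    ToyLogging.uninitialize()
    println(s"new ordering: initialized = ${ToyLogging.initialized}") // false
  }
}
```

Running the demo prints `true` for the old ordering and `false` for the new one, which is why the `Logging.uninitialize()` call has to happen after `prepareSubmitEnvironment`, inside `runMain`.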