[SPARK-36615][CORE] Register shutdown hook earlier when starting SparkContext

### What changes were proposed in this pull request?
Users often press Ctrl+C to stop a SparkContext that is still starting, for example while it is registering with YARN in client mode and cluster resources are tight.

At that point the SparkContext has not yet registered its shutdown hook, so `sc.stop()` is never invoked when the application exits. We should register the shutdown hook earlier during SparkContext startup.
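
To make the failure mode concrete, here is a minimal, self-contained sketch of the ordering issue. It uses Scala's standard `sys.addShutdownHook` rather than Spark's internal `ShutdownHookManager`, and the simulated slow startup is hypothetical:

```scala
object ShutdownHookOrderingDemo {
  def main(args: Array[String]): Unit = {
    // Register the cleanup hook FIRST, before any long-running startup work.
    // If the user presses Ctrl+C while startup is still in flight, the JVM
    // still runs this hook, so cleanup is not skipped.
    val hook = sys.addShutdownHook {
      println("Invoking stop() from shutdown hook")
    }

    // Simulated slow startup, standing in for registering with YARN in
    // client mode while resources are tight. Before this patch, the hook
    // above was only registered after this step, so a Ctrl+C here exited
    // the JVM without ever calling stop().
    Thread.sleep(30000)

    // Normal path: startup completed, the application stops cleanly, and
    // the hook is removed so cleanup does not run a second time at exit.
    hook.remove()
    println("Stopped cleanly without the hook")
  }
}
```

Pressing Ctrl+C during the sleep still prints the hook message; the patch applies the same ordering inside SparkContext initialization.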

### Why are the changes needed?

Ensures that `sc.stop()` is invoked when a SparkContext application is killed while it is still starting.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
No new tests are needed; the change only reorders existing initialization code.

Closes #33869 from AngersZhuuuu/SPARK-36615.

Authored-by: Angerszhuuuu <angers.zhu@gmail.com>
Signed-off-by: Mridul Muralidharan <mridul<at>gmail.com>
```diff
@@ -645,20 +645,6 @@ class SparkContext(config: SparkConf) extends Logging {
     // Attach the driver metrics servlet handler to the web ui after the metrics system is started.
     _env.metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))
-    // Post init
-    _taskScheduler.postStartHook()
-    if (isLocal) {
-      _env.metricsSystem.registerSource(Executor.executorSourceLocalModeOnly)
-    }
-    _env.metricsSystem.registerSource(_dagScheduler.metricsSource)
-    _env.metricsSystem.registerSource(new BlockManagerSource(_env.blockManager))
-    _env.metricsSystem.registerSource(new JVMCPUSource())
-    _executorMetricsSource.foreach(_.register(_env.metricsSystem))
-    _executorAllocationManager.foreach { e =>
-      _env.metricsSystem.registerSource(e.executorAllocationManagerSource)
-    }
-    appStatusSource.foreach(_env.metricsSystem.registerSource(_))
-    _plugins.foreach(_.registerMetrics(applicationId))
     // Make sure the context is stopped if the user forgets about it. This avoids leaving
     // unfinished event logs around after the JVM exits cleanly. It doesn't help if the JVM
     // is killed, though.
```
```diff
@@ -673,6 +659,21 @@ class SparkContext(config: SparkConf) extends Logging {
           logWarning("Ignoring Exception while stopping SparkContext from shutdown hook", e)
       }
     }
+    // Post init
+    _taskScheduler.postStartHook()
+    if (isLocal) {
+      _env.metricsSystem.registerSource(Executor.executorSourceLocalModeOnly)
+    }
+    _env.metricsSystem.registerSource(_dagScheduler.metricsSource)
+    _env.metricsSystem.registerSource(new BlockManagerSource(_env.blockManager))
+    _env.metricsSystem.registerSource(new JVMCPUSource())
+    _executorMetricsSource.foreach(_.register(_env.metricsSystem))
+    _executorAllocationManager.foreach { e =>
+      _env.metricsSystem.registerSource(e.executorAllocationManagerSource)
+    }
+    appStatusSource.foreach(_env.metricsSystem.registerSource(_))
+    _plugins.foreach(_.registerMetrics(applicationId))
   } catch {
     case NonFatal(e) =>
       logError("Error initializing SparkContext.", e)
```