[SPARK-2645] [CORE] Allow SparkEnv.stop() to be called multiple times without side effects.

Fix for SparkContext stop behavior: allow sc.stop() to be called multiple times without side effects. (A sketch of the guard pattern this relies on follows the commit metadata below.)

Author: Joshi <rekhajoshm@gmail.com>
Author: Rekha Joshi <rekhajoshm@gmail.com>

Closes #6973 from rekhajoshm/SPARK-2645 and squashes the following commits:

277043e [Joshi] Fix for SparkContext stop behavior
446b0a4 [Joshi] Fix for SparkContext stop behavior
2ce5760 [Joshi] Fix for SparkContext stop behavior
c97839a [Joshi] Fix for SparkContext stop behavior
1aff39c [Joshi] Fix for SparkContext stop behavior
12f66b5 [Joshi] Fix for SparkContext stop behavior
72bb484 [Joshi] Fix for SparkContext stop behavior
a5a7d7f [Joshi] Fix for SparkContext stop behavior
9193a0c [Joshi] Fix for SparkContext stop behavior
58dba70 [Joshi] SPARK-2645: Fix for SparkContext stop behavior
380c5b0 [Joshi] SPARK-2645: Fix for SparkContext stop behavior
b566b66 [Joshi] SPARK-2645: Fix for SparkContext stop behavior
0be142d [Rekha Joshi] Merge pull request #3 from apache/master
106fd8e [Rekha Joshi] Merge pull request #2 from apache/master
e3677c9 [Rekha Joshi] Merge pull request #1 from apache/master
Authored by Joshi on 2015-06-30 14:00:35 -07:00; committed by Andrew Or
parent 79f0b371a3
commit 7dda0844e1
2 changed files with 44 additions and 29 deletions
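The change itself is the standard idempotent-shutdown pattern: SparkEnv.stop() records that it has already run and turns every subsequent call into a no-op, so SparkContext.stop() can be invoked repeatedly without releasing resources twice. Below is a minimal sketch of that guard pattern only; CleanupService, releaseResources, and CleanupServiceDemo are hypothetical names used for illustration and are not part of the patch.

// Minimal sketch of the idempotent-stop guard used by this patch.
// CleanupService stands in for SparkEnv; only the guard logic mirrors the change.
class CleanupService {
  private var isStopped = false

  def stop(): Unit = {
    if (!isStopped) {
      isStopped = true      // flip the flag first so repeated calls bail out immediately
      releaseResources()    // real code would stop python workers, block manager, etc.
    }
  }

  private def releaseResources(): Unit = {
    println("releasing resources once")
  }
}

object CleanupServiceDemo extends App {
  val svc = new CleanupService
  svc.stop()
  svc.stop() // second call is a harmless no-op, mirroring the new SparkContextSuite test
}

Setting the flag before any teardown work runs means a second call never re-enters the shutdown sequence, which matches the ordering used inside the patched SparkEnv.stop().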

core/src/main/scala/org/apache/spark/SparkEnv.scala

@@ -22,7 +22,6 @@ import java.net.Socket
 import akka.actor.ActorSystem
-import scala.collection.JavaConversions._
 import scala.collection.mutable
 import scala.util.Properties
@@ -90,8 +89,10 @@ class SparkEnv (
   private var driverTmpDirToDelete: Option[String] = None
 
   private[spark] def stop() {
+
+    if (!isStopped) {
       isStopped = true
-    pythonWorkers.foreach { case(key, worker) => worker.stop() }
+      pythonWorkers.values.foreach(_.stop())
       Option(httpFileServer).foreach(_.stop())
       mapOutputTracker.stop()
       shuffleManager.stop()
@@ -125,6 +126,7 @@ class SparkEnv (
         case None => // We just need to delete tmp dir created by driver, so do nothing on executor
       }
+    }
   }
 
   private[spark]
   def createPythonWorker(pythonExec: String, envVars: Map[String, String]): java.net.Socket = {

core/src/test/scala/org/apache/spark/SparkContextSuite.scala

@@ -30,6 +30,7 @@ import org.apache.spark.util.Utils
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
+import org.scalatest.Matchers._
 
 class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
@@ -272,4 +273,16 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
       sc.stop()
     }
   }
+
+  test("calling multiple sc.stop() must not throw any exception") {
+    noException should be thrownBy {
+      sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
+      val cnt = sc.parallelize(1 to 4).count()
+      sc.cancelAllJobs()
+      sc.stop()
+      // call stop second time
+      sc.stop()
+    }
+  }
 }