diff --git a/core/src/main/scala/spark/Executor.scala b/core/src/main/scala/spark/Executor.scala
index c795b6c351..c8cb730d14 100644
--- a/core/src/main/scala/spark/Executor.scala
+++ b/core/src/main/scala/spark/Executor.scala
@@ -37,17 +37,17 @@ class Executor extends org.apache.mesos.Executor with Logging {
     // Make sure an appropriate class loader is set for remote actors
     RemoteActor.classLoader = getClass.getClassLoader
-
+
+    // Create our ClassLoader (using spark properties) and set it on this thread
+    classLoader = createClassLoader()
+    Thread.currentThread.setContextClassLoader(classLoader)
+
     // Initialize Spark environment (using system properties read above)
     env = SparkEnv.createFromSystemProperties(false)
     SparkEnv.set(env)
 
     // Old stuff that isn't yet using env
     Broadcast.initialize(false)
 
-    // Create our ClassLoader (using spark properties) and set it on this thread
-    classLoader = createClassLoader()
-    Thread.currentThread.setContextClassLoader(classLoader)
-
     // Start worker thread pool
     threadPool = new ThreadPoolExecutor(
       1, 128, 600, TimeUnit.SECONDS, new SynchronousQueue[Runnable])
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/spark/SparkEnv.scala
index cd752f8b65..7e07811c90 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/spark/SparkEnv.scala
@@ -26,7 +26,7 @@ object SparkEnv {
     val cache = Class.forName(cacheClass).newInstance().asInstanceOf[Cache]
 
     val serializerClass = System.getProperty("spark.serializer", "spark.JavaSerializer")
-    val serializer = Class.forName(serializerClass).newInstance().asInstanceOf[Serializer]
+    val serializer = Class.forName(serializerClass, true, Thread.currentThread.getContextClassLoader).newInstance().asInstanceOf[Serializer]
 
     val closureSerializerClass = System.getProperty("spark.closure.serializer", "spark.JavaSerializer")
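
Note on the pattern: the two hunks work together. Executor now installs its custom ClassLoader as the thread's context class loader before SparkEnv is created, and SparkEnv resolves the serializer class through that context class loader (via the three-argument Class.forName) instead of through the loader that defined SparkEnv itself, so a serializer class shipped in user jars can be found. Below is a minimal standalone Scala sketch (not part of this patch) of that lookup pattern; the property name "example.serializer" and the local JavaSerializer stand-in are hypothetical, chosen so the sketch runs on its own.

// Resolve a class named in a system property through the thread's
// context class loader, mirroring the SparkEnv change above.
trait Serializer
class JavaSerializer extends Serializer  // stand-in default so the sketch is self-contained

object ContextLoaderExample {
  def createSerializer(): Serializer = {
    // "example.serializer" is a hypothetical property for this sketch;
    // the patch itself reads "spark.serializer".
    val serializerClass =
      System.getProperty("example.serializer", classOf[JavaSerializer].getName)
    // Class.forName(name, initialize, loader) lets us choose the loader
    // explicitly; the context class loader (set on this thread the way
    // Executor now does before creating SparkEnv) can see user classes.
    Class.forName(serializerClass, true, Thread.currentThread.getContextClassLoader)
      .newInstance()
      .asInstanceOf[Serializer]
  }

  def main(args: Array[String]): Unit = {
    // Prints "JavaSerializer" unless example.serializer names another class.
    println(createSerializer().getClass.getName)
  }
}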