[SPARK-4877] Allow user first classes to extend classes in the parent.

Previously, the class loader isolation was almost too good: if a child (user-first) class needed to load or reference a class that was only available in the parent, it could not do so. This adds tests for that case; the user-first FakeClass2 extends the parent-only FakeClass3. It also sneaks in a fix where only the first stage seemed to work, and on subsequent stages a LinkageError was thrown because classes from the user-first classpath were getting defined twice.

Author: Stephen Haberman <stephen@exigencecorp.com>

Closes #3725 from stephenh/4877_user_first_parent_inheritance and squashes the following commits:

dabcd35 [Stephen Haberman] [SPARK-4877] Respect userClassPathFirst for the driver code too.
3d0fa7c [Stephen Haberman] [SPARK-4877] Allow user first classes to extend classes in the parent.

Parent: c01b9852ea
Commit: 9792bec596

TestUtils.scala:

@@ -43,11 +43,20 @@ private[spark] object TestUtils {
   * Note: if this is used during class loader tests, class names should be unique
   * in order to avoid interference between tests.
   */
-  def createJarWithClasses(classNames: Seq[String], value: String = ""): URL = {
+  def createJarWithClasses(
+      classNames: Seq[String],
+      toStringValue: String = "",
+      classNamesWithBase: Seq[(String, String)] = Seq(),
+      classpathUrls: Seq[URL] = Seq()): URL = {
     val tempDir = Utils.createTempDir()
-    val files = for (name <- classNames) yield createCompiledClass(name, tempDir, value)
+    val files1 = for (name <- classNames) yield {
+      createCompiledClass(name, tempDir, toStringValue, classpathUrls = classpathUrls)
+    }
+    val files2 = for ((childName, baseName) <- classNamesWithBase) yield {
+      createCompiledClass(childName, tempDir, toStringValue, baseName, classpathUrls)
+    }
     val jarFile = new File(tempDir, "testJar-%s.jar".format(System.currentTimeMillis()))
-    createJar(files, jarFile)
+    createJar(files1 ++ files2, jarFile)
   }
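
The extended createJarWithClasses makes it possible to build one jar whose classes extend classes that live only in another jar. A rough usage sketch (not part of the diff; the object name is hypothetical, and since TestUtils is private[spark] the caller has to sit under the org.apache.spark package, e.g. in a test):

package org.apache.spark

import java.net.URL

// Hypothetical usage sketch: a "parent" jar containing only FakeClass3, and a
// "child" jar whose FakeClass2 extends FakeClass3.
object CreateJarsSketch {
  val parentJar: URL = TestUtils.createJarWithClasses(
    classNames = Seq("FakeClass3"),
    toStringValue = "parent")

  // The parent jar is passed via classpathUrls so javac can resolve the base
  // class while compiling the child class.
  val childJar: URL = TestUtils.createJarWithClasses(
    classNames = Seq(),
    classNamesWithBase = Seq(("FakeClass2", "FakeClass3")),
    toStringValue = "child",
    classpathUrls = Seq(parentJar))
}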

@@ -85,15 +94,26 @@ private[spark] object TestUtils {
   }

   /** Creates a compiled class with the given name. Class file will be placed in destDir. */
-  def createCompiledClass(className: String, destDir: File, value: String = ""): File = {
+  def createCompiledClass(
+      className: String,
+      destDir: File,
+      toStringValue: String = "",
+      baseClass: String = null,
+      classpathUrls: Seq[URL] = Seq()): File = {
     val compiler = ToolProvider.getSystemJavaCompiler
+    val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
     val sourceFile = new JavaSourceFromString(className,
-      "public class " + className + " implements java.io.Serializable {" +
-      " @Override public String toString() { return \"" + value + "\"; }}")
+      "public class " + className + extendsText + " implements java.io.Serializable {" +
+      " @Override public String toString() { return \"" + toStringValue + "\"; }}")

     // Calling this outputs a class file in pwd. It's easier to just rename the file than
     // build a custom FileManager that controls the output location.
-    compiler.getTask(null, null, null, null, null, Seq(sourceFile)).call()
+    val options = if (classpathUrls.nonEmpty) {
+      Seq("-classpath", classpathUrls.map { _.getFile }.mkString(File.pathSeparator))
+    } else {
+      Seq()
+    }
+    compiler.getTask(null, null, null, options, null, Seq(sourceFile)).call()

     val fileName = className + ".class"
     val result = new File(fileName)
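
The options plumbing above is ordinary javax.tools usage. As a self-contained illustration of the same idea, not Spark code and with invented names, compiling a source file against a set of jars looks roughly like this:

import java.io.File
import javax.tools.ToolProvider

import scala.collection.JavaConverters._

// Standalone sketch of the same javac trick the diff uses: pass -classpath so
// a generated child class can extend a base class that only exists in jars
// that were built earlier.
object CompileAgainstJarsSketch {
  def compile(sourceFile: File, classpathJars: Seq[File]): Boolean = {
    val compiler = ToolProvider.getSystemJavaCompiler
    val fileManager = compiler.getStandardFileManager(null, null, null)
    val units = fileManager.getJavaFileObjectsFromFiles(Seq(sourceFile).asJava)
    // Mirrors the diff's `options`: only add -classpath when jars are supplied.
    val options =
      if (classpathJars.nonEmpty) {
        Seq("-classpath", classpathJars.map(_.getAbsolutePath).mkString(File.pathSeparator))
      } else {
        Seq.empty[String]
      }
    val ok = compiler.getTask(null, fileManager, null, options.asJava, null, units).call()
    fileManager.close()
    ok.booleanValue()
  }
}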

SparkSubmit.scala:

@@ -20,11 +20,9 @@ package org.apache.spark.deploy
 import java.io.{File, PrintStream}
 import java.lang.reflect.{Modifier, InvocationTargetException}
 import java.net.URL

 import scala.collection.mutable.{ArrayBuffer, HashMap, Map}

 import org.apache.hadoop.fs.Path
 import org.apache.ivy.Ivy
 import org.apache.ivy.core.LogOptions
 import org.apache.ivy.core.module.descriptor.{DefaultExcludeRule, DefaultDependencyDescriptor, DefaultModuleDescriptor}

@@ -35,9 +33,10 @@ import org.apache.ivy.core.retrieve.RetrieveOptions
 import org.apache.ivy.core.settings.IvySettings
 import org.apache.ivy.plugins.matcher.GlobPatternMatcher
 import org.apache.ivy.plugins.resolver.{ChainResolver, IBiblioResolver}

 import org.apache.spark.executor.ExecutorURLClassLoader
 import org.apache.spark.util.Utils
+import org.apache.spark.executor.ChildExecutorURLClassLoader
+import org.apache.spark.executor.MutableURLClassLoader

 /**
  * Main gateway of launching a Spark application.

@@ -389,8 +388,14 @@ object SparkSubmit {
       printStream.println("\n")
     }

-    val loader = new ExecutorURLClassLoader(new Array[URL](0),
-      Thread.currentThread.getContextClassLoader)
+    val loader =
+      if (sysProps.getOrElse("spark.files.userClassPathFirst", "false").toBoolean) {
+        new ChildExecutorURLClassLoader(new Array[URL](0),
+          Thread.currentThread.getContextClassLoader)
+      } else {
+        new ExecutorURLClassLoader(new Array[URL](0),
+          Thread.currentThread.getContextClassLoader)
+      }
     Thread.currentThread.setContextClassLoader(loader)

     for (jar <- childClasspath) {
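
With the dabcd35 follow-up ("Respect userClassPathFirst for the driver code too"), the driver now picks the child-first loader based on the same property the executors already honor. Assuming an application jar that bundles its own copy of a dependency (the application class and jar name below are placeholders), this would be enabled with something like:

spark-submit --class com.example.Main --conf spark.files.userClassPathFirst=true my-app.jar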

@@ -434,7 +439,7 @@ object SparkSubmit {
     }
   }

-  private def addJarToClasspath(localJar: String, loader: ExecutorURLClassLoader) {
+  private def addJarToClasspath(localJar: String, loader: MutableURLClassLoader) {
     val uri = Utils.resolveURI(localJar)
     uri.getScheme match {
       case "file" | "local" =>
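
addJarToClasspath only needs the ability to append URLs, so widening the parameter to the shared supertype lets it accept either loader. A rough illustration of that shape (the trait and class names here are stand-ins; Spark's actual MutableURLClassLoader may be defined differently):

import java.io.File
import java.net.{URL, URLClassLoader}

// Illustration only: a mutable-classloader abstraction like the one the
// signature change relies on.
trait MutableLoaderSketch {
  def addURL(url: URL): Unit
}

class PlainLoaderSketch(urls: Array[URL], parent: ClassLoader)
  extends URLClassLoader(urls, parent) with MutableLoaderSketch {
  // Widen URLClassLoader's protected addURL to satisfy the public trait member.
  override def addURL(url: URL): Unit = super.addURL(url)
}

object AddJarSketch {
  // Works for any loader that can take new URLs, child-first or not.
  def addJarToClasspath(localJar: File, loader: MutableLoaderSketch): Unit = {
    loader.addURL(localJar.toURI.toURL)
  }
}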

ExecutorURLClassLoader.scala:

@@ -39,7 +39,17 @@ private[spark] class ChildExecutorURLClassLoader(urls: Array[URL], parent: Class
       super.addURL(url)
     }
     override def findClass(name: String): Class[_] = {
-      super.findClass(name)
+      val loaded = super.findLoadedClass(name)
+      if (loaded != null) {
+        return loaded
+      }
+      try {
+        super.findClass(name)
+      } catch {
+        case e: ClassNotFoundException => {
+          parentClassLoader.loadClass(name)
+        }
+      }
     }
   }
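
For readers outside the diff context, a standalone sketch of the child-first lookup being fixed here, check for an already-defined class, try the user classpath, then fall back to the parent, might look like this (illustrative only, not Spark's implementation; all names are invented):

import java.net.{URL, URLClassLoader}

// Sketch of child-first loading: reuse an already-defined class instead of
// defining it twice, and consult the real parent only on a miss.
class ChildFirstSketchLoader(urls: Array[URL], realParent: ClassLoader)
  extends ClassLoader(null) {  // null parent: disable normal parent delegation

  // Inner loader exposing the protected findClass/findLoadedClass hooks.
  private class UserLoader(jars: Array[URL]) extends URLClassLoader(jars, null) {
    def findUserClass(name: String): Class[_] = findClass(name)
    def findAlreadyLoaded(name: String): Class[_] = findLoadedClass(name)
  }

  private val userLoader = new UserLoader(urls)

  override def loadClass(name: String, resolve: Boolean): Class[_] = {
    // resolve flag ignored for brevity.
    val loaded = userLoader.findAlreadyLoaded(name)
    if (loaded != null) {
      loaded                             // already defined: don't define it again
    } else {
      try {
        userLoader.findUserClass(name)   // user classpath first
      } catch {
        case _: ClassNotFoundException =>
          realParent.loadClass(name)     // parent-only classes still resolve
      }
    }
  }
}

Built this way, FakeClass2 can resolve from the user jar even though its base FakeClass3 lives only in the parent, and loading the same name twice yields the identical Class object instead of a second definition.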

ExecutorURLClassLoaderSuite.scala:

@@ -26,10 +26,14 @@ import org.apache.spark.util.Utils

 class ExecutorURLClassLoaderSuite extends FunSuite {

-  val childClassNames = List("FakeClass1", "FakeClass2")
-  val parentClassNames = List("FakeClass1", "FakeClass2", "FakeClass3")
-  val urls = List(TestUtils.createJarWithClasses(childClassNames, "1")).toArray
-  val urls2 = List(TestUtils.createJarWithClasses(parentClassNames, "2")).toArray
+  val urls2 = List(TestUtils.createJarWithClasses(
+    classNames = Seq("FakeClass1", "FakeClass2", "FakeClass3"),
+    toStringValue = "2")).toArray
+  val urls = List(TestUtils.createJarWithClasses(
+    classNames = Seq("FakeClass1"),
+    classNamesWithBase = Seq(("FakeClass2", "FakeClass3")), // FakeClass3 is in parent
+    toStringValue = "1",
+    classpathUrls = urls2)).toArray

   test("child first") {
     val parentLoader = new URLClassLoader(urls2, null)

@@ -37,6 +41,8 @@ class ExecutorURLClassLoaderSuite extends FunSuite {
     val fakeClass = classLoader.loadClass("FakeClass2").newInstance()
     val fakeClassVersion = fakeClass.toString
     assert(fakeClassVersion === "1")
+    val fakeClass2 = classLoader.loadClass("FakeClass2").newInstance()
+    assert(fakeClass.getClass === fakeClass2.getClass)
   }

   test("parent first") {

@@ -45,6 +51,8 @@ class ExecutorURLClassLoaderSuite extends FunSuite {
     val fakeClass = classLoader.loadClass("FakeClass1").newInstance()
     val fakeClassVersion = fakeClass.toString
     assert(fakeClassVersion === "2")
+    val fakeClass2 = classLoader.loadClass("FakeClass1").newInstance()
+    assert(fakeClass.getClass === fakeClass2.getClass)
   }

   test("child first can fall back") {
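
The new assertions above guard the sneaked-in fix: the second loadClass must return the class that was already defined rather than defining it again. For reference, a standalone sketch (not Spark code; all names invented) of what goes wrong without that check, since defining the same class twice in one loader raises a LinkageError:

import java.io.ByteArrayOutputStream

// Demonstrates java.lang.LinkageError ("attempted duplicate class definition")
// when one ClassLoader defines the same class twice.
object DuplicateDefineSketch {

  private class DefiningLoader extends ClassLoader(null) {
    def define(name: String, bytes: Array[Byte]): Class[_] =
      defineClass(name, bytes, 0, bytes.length)
  }

  // Read the compiled bytes of an existing class from the classpath.
  private def classBytes(cls: Class[_]): Array[Byte] = {
    val in = cls.getResourceAsStream("/" + cls.getName.replace('.', '/') + ".class")
    val out = new ByteArrayOutputStream()
    val buf = new Array[Byte](4096)
    var n = in.read(buf)
    while (n != -1) { out.write(buf, 0, n); n = in.read(buf) }
    in.close()
    out.toByteArray
  }

  def main(args: Array[String]): Unit = {
    val name = classOf[DefiningLoader].getName
    val bytes = classBytes(classOf[DefiningLoader])
    val loader = new DefiningLoader
    loader.define(name, bytes)          // first definition is fine
    try {
      loader.define(name, bytes)        // second definition blows up
    } catch {
      case e: LinkageError => println(s"as expected: $e")
    }
  }
}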