SPARK-1652: Spark submit should fail gracefully if YARN not enabled

Author: Patrick Wendell <pwendell@gmail.com>

Closes #579 from pwendell/spark-submit-yarn-2 and squashes the following commits:

05e1b11 [Patrick Wendell] Small fix
d2a40ad [Patrick Wendell] SPARK-1652: Spark submit should fail gracefully if YARN support not enabled
Committed by Patrick Wendell on 2014-04-28 17:26:57 -07:00
parent 8421034e79
commit cae054aaf4
2 changed files with 16 additions and 0 deletions

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

@@ -23,6 +23,7 @@ import java.net.{URI, URL}
 import scala.collection.mutable.{ArrayBuffer, HashMap, Map}

 import org.apache.spark.executor.ExecutorURLClassLoader
+import org.apache.spark.util.Utils

 /**
  * Scala code behind the spark-submit script. The script handles setting up the classpath with
@@ -128,6 +129,15 @@ object SparkSubmit {
       childArgs += ("--class", appArgs.mainClass)
     }

+    if (clusterManager == YARN) {
+      // The choice of class is arbitrary, could use any spark-yarn class
+      if (!Utils.classIsLoadable("org.apache.spark.deploy.yarn.Client") && !Utils.isTesting) {
+        val msg = "Could not load YARN classes. This copy of Spark may not have been compiled " +
+          "with YARN support."
+        throw new Exception(msg)
+      }
+    }
+
     val options = List[OptionAssigner](
       new OptionAssigner(appArgs.master, ALL_CLUSTER_MGRS, false, sysProp = "spark.master"),
       new OptionAssigner(appArgs.driverExtraClassPath, STANDALONE | YARN, true,
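
For readers who want to see the failure mode in isolation, here is a minimal, self-contained sketch of the guard added above, assuming only the JDK and the Scala standard library. The object name YarnGuardSketch, the isTesting parameter (standing in for Utils.isTesting), and the use of this object's own class loader are illustrative; only the probed class name and the error message come from the patch.

object YarnGuardSketch {
  // Returns true if className can be resolved without initializing the class.
  def classIsLoadable(className: String): Boolean =
    scala.util.Try(Class.forName(className, false, getClass.getClassLoader)).isSuccess

  // Mirrors the check added to SparkSubmit: refuse to proceed when the YARN
  // client class is absent, unless running under tests (hypothetical flag).
  def requireYarnSupport(isTesting: Boolean = false): Unit = {
    // The choice of class is arbitrary, could use any spark-yarn class.
    if (!classIsLoadable("org.apache.spark.deploy.yarn.Client") && !isTesting) {
      throw new Exception(
        "Could not load YARN classes. This copy of Spark may not have been compiled " +
        "with YARN support.")
    }
  }

  def main(args: Array[String]): Unit = {
    // Throws the message above when the yarn module is not on the classpath;
    // returns silently when YARN support is present.
    requireYarnSupport()
  }
}

Passing initialize = false to Class.forName keeps the probe cheap: the class is only located, and no static initializers run.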

core/src/main/scala/org/apache/spark/util/Utils.scala

@@ -28,6 +28,7 @@ import scala.collection.Map
 import scala.collection.mutable.ArrayBuffer
 import scala.io.Source
 import scala.reflect.ClassTag
+import scala.util.Try

 import com.google.common.io.Files
 import org.apache.commons.lang.SystemUtils
@@ -137,6 +138,11 @@ private[spark] object Utils extends Logging {
   def getContextOrSparkClassLoader =
     Option(Thread.currentThread().getContextClassLoader).getOrElse(getSparkClassLoader)

+  /** Determines whether the provided class is loadable in the current thread. */
+  def classIsLoadable(clazz: String): Boolean = {
+    Try { Class.forName(clazz, false, getContextOrSparkClassLoader) }.isSuccess
+  }
+
   /**
    * Primitive often used when writing {@link java.nio.ByteBuffer} to {@link java.io.DataOutput}.
    */
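
Below is a small usage sketch of the new helper, assuming a plain JVM classpath; the object name ClassIsLoadableDemo and the probed class names are illustrative, and this object's own class loader stands in for getContextOrSparkClassLoader.

import scala.util.Try

object ClassIsLoadableDemo {
  // Same shape as the helper added to Utils, minus the Spark-specific loader lookup.
  def classIsLoadable(clazz: String): Boolean =
    Try(Class.forName(clazz, false, getClass.getClassLoader)).isSuccess

  def main(args: Array[String]): Unit = {
    println(classIsLoadable("java.lang.String"))        // true: ships with every JDK
    println(classIsLoadable("org.example.NoSuchClass")) // false: not on the classpath
  }
}

Because the probe wraps Class.forName in Try, a missing class surfaces as false rather than as a ClassNotFoundException, which is what lets SparkSubmit turn the condition into a readable error message instead of a raw stack trace from deep inside class loading.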