[Test] Better exception message from SparkSubmitSuite

Before:
```
Exception in thread "main" java.lang.Exception: Could not load user defined classes inside of executors
	at org.apache.spark.deploy.JarCreationTest$.main(SparkSubmitSuite.scala:471)
	at org.apache.spark.deploy.JarCreationTest.main(SparkSubmitSuite.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
```
After:
```
Exception in thread "main" java.lang.Exception: Could not load user class from jar:
java.lang.UnsupportedClassVersionError: SparkSubmitClassA : Unsupported major.minor version 51.0
	java.lang.ClassLoader.defineClass1(Native Method)
	java.lang.ClassLoader.defineClass(ClassLoader.java:643)
	...
	at org.apache.spark.deploy.JarCreationTest$.main(SparkSubmitSuite.scala:472)
	at org.apache.spark.deploy.JarCreationTest.main(SparkSubmitSuite.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
```
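The extra detail comes from formatting the executor-side throwable (its `toString` plus stack trace) into a single tab-indented string and rethrowing it from the driver. Below is a minimal standalone sketch of that formatting; the object and method names (`ExceptionFormatSketch`, `formatRemoteException`) are ours for illustration, and it uses `getStackTrace` rather than the Scala `getStackTraceString` helper the test relies on:

```scala
object ExceptionFormatSketch {
  // Render a throwable as "<exception>\n<frame>\n<frame>...", then indent every
  // line with a tab so it nests under the driver-side exception message.
  def formatRemoteException(t: Throwable): String = {
    val raw = t.toString + "\n" + t.getStackTrace.mkString("\n")
    raw.replaceAll("\n", "\n\t")
  }

  def main(args: Array[String]): Unit = {
    try {
      // Force a ClassNotFoundException so there is something to format.
      Class.forName("NoSuchClassForDemo", true, Thread.currentThread().getContextClassLoader)
    } catch {
      case t: Throwable =>
        // Mirrors the message the updated test throws on the driver.
        throw new Exception("Could not load user class from jar:\n" + formatRemoteException(t))
    }
  }
}
```

In the change itself, the formatted string is returned from the `mapPartitions` closure and rethrown on the driver after `collect()`, as the diff below shows.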

Author: Andrew Or <andrew@databricks.com>

Closes #3212 from andrewor14/submit-suite-message and squashes the following commits:

7779248 [Andrew Or] Format exception
8fe6719 [Andrew Or] Better exception message from failed test
Committed by Andrew Or on 2014-11-12 13:35:48 -08:00
parent 36ddeb7bf8
commit 6e3c5a296c

SparkSubmitSuite.scala

```diff
@@ -21,7 +21,7 @@ import java.io._
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkException, TestUtils}
+import org.apache.spark._
 import org.apache.spark.deploy.SparkSubmit._
 import org.apache.spark.util.Utils
 import org.scalatest.FunSuite
@@ -451,24 +451,25 @@ class SparkSubmitSuite extends FunSuite with Matchers {
   }
 }
 
-object JarCreationTest {
+object JarCreationTest extends Logging {
   def main(args: Array[String]) {
     Utils.configTestLog4j("INFO")
     val conf = new SparkConf()
     val sc = new SparkContext(conf)
     val result = sc.makeRDD(1 to 100, 10).mapPartitions { x =>
-      var foundClasses = false
+      var exception: String = null
       try {
         Class.forName("SparkSubmitClassA", true, Thread.currentThread().getContextClassLoader)
         Class.forName("SparkSubmitClassB", true, Thread.currentThread().getContextClassLoader)
-        foundClasses = true
       } catch {
-        case _: Throwable => // catch all
+        case t: Throwable =>
+          exception = t + "\n" + t.getStackTraceString
+          exception = exception.replaceAll("\n", "\n\t")
       }
-      Seq(foundClasses).iterator
+      Option(exception).toSeq.iterator
     }.collect()
-    if (result.contains(false)) {
-      throw new Exception("Could not load user defined classes inside of executors")
+    if (result.nonEmpty) {
+      throw new Exception("Could not load user class from jar:\n" + result(0))
     }
   }
 }
```