diff --git a/core/src/main/scala/spark/Executor.scala b/core/src/main/scala/spark/Executor.scala
index f32e66bef4..7764acad1a 100644
--- a/core/src/main/scala/spark/Executor.scala
+++ b/core/src/main/scala/spark/Executor.scala
@@ -108,7 +108,7 @@ class Executor extends mesos.Executor with Logging {
       loader = {
         try {
           val klass = Class.forName("spark.repl.ExecutorClassLoader").asInstanceOf[Class[_ <: ClassLoader]]
-          val constructor = klass.getConstructor(classUri.getClass, loader.getClass)
+          val constructor = klass.getConstructor(classOf[String], classOf[ClassLoader])
           constructor.newInstance(classUri, loader)
         } catch {
           case _: ClassNotFoundException => loader
diff --git a/core/src/main/scala/spark/HadoopFile.scala b/core/src/main/scala/spark/HadoopFile.scala
index 0a7996c7bd..beb53ce1a5 100644
--- a/core/src/main/scala/spark/HadoopFile.scala
+++ b/core/src/main/scala/spark/HadoopFile.scala
@@ -3,6 +3,7 @@ package spark
 import mesos.SlaveOffer
 
 import org.apache.hadoop.io.LongWritable
+import org.apache.hadoop.io.NullWritable
 import org.apache.hadoop.io.Text
 import org.apache.hadoop.mapred.FileInputFormat
 import org.apache.hadoop.mapred.InputFormat
@@ -51,6 +52,15 @@ extends RDD[(K, V)](sc) {
       .asInstanceOf[InputFormat[K, V]]
   }
 
+  // Helper method for creating a Hadoop Writable, because the commonly used
+  // NullWritable class has no constructor
+  def createWritable[T](clazz: Class[T]): T = {
+    if (clazz == classOf[NullWritable])
+      NullWritable.get().asInstanceOf[T]
+    else
+      clazz.newInstance()
+  }
+
   override def splits = splits_
 
   override def compute(theSplit: Split) = new Iterator[(K, V)] {
@@ -63,8 +73,8 @@ extends RDD[(K, V)](sc) {
     val fmt = createInputFormat(conf)
     reader = fmt.getRecordReader(split.inputSplit.value, conf, Reporter.NULL)
 
-    val key: K = keyClass.newInstance()
-    val value: V = valueClass.newInstance()
+    val key: K = createWritable(keyClass)
+    val value: V = createWritable(valueClass)
 
     var gotNext = false
     var finished = false
diff --git a/project/build/SparkProject.scala b/project/build/SparkProject.scala
index 7ebd92fde6..f33b42b986 100644
--- a/project/build/SparkProject.scala
+++ b/project/build/SparkProject.scala
@@ -16,10 +16,11 @@ class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProje
 
   lazy val bagel = project("bagel", "Bagel", new BagelProject(_), core)
 
-  lazy val jettyWebapp = "org.eclipse.jetty" % "jetty-webapp" % "7.4.1.v20110513" % "provided"
-
   trait BaseProject extends BasicScalaProject with ScalaPaths with BasicPackagePaths with Eclipsify with IdeaProject {
     override def compileOptions = super.compileOptions ++ Seq(Unchecked)
+
+    lazy val jettyServer = "org.eclipse.jetty" % "jetty-server" % "7.4.2.v20110526"
+
     override def packageDocsJar = defaultJarPath("-javadoc.jar")
     override def packageSrcJar= defaultJarPath("-sources.jar")
     lazy val sourceArtifact = Artifact.sources(artifactID)
@@ -38,21 +39,16 @@ class SparkProject(info: ProjectInfo) extends ParentProject(info) with IdeaProje
     val asm = "asm" % "asm-all" % "3.3.1"
     val scalaTest = "org.scalatest" % "scalatest_2.9.0" % "1.4.1" % "test"
     val scalaCheck = "org.scala-tools.testing" % "scalacheck_2.9.0" % "1.8" % "test"
-    val jetty = jettyWebapp
   }
 
-  class ReplProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport {
-    val jetty = jettyWebapp
-  }
+  class ReplProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
 
   class ExamplesProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject {
     val colt = "colt" % "colt" % "1.2.0"
   }
 
-  class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport {
-    val jetty = jettyWebapp
-  }
-
+  class BagelProject(info: ProjectInfo) extends DefaultProject(info) with BaseProject with DepJar with XmlTestReport
+
 }
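
Note on the Executor.scala change: java.lang.Class#getConstructor looks a constructor up by its declared parameter types. classUri is a String, so classUri.getClass happens to resolve, but loader.getClass returns the runtime class of the current loader (some concrete ClassLoader subclass), so the lookup throws NoSuchMethodException, which the surrounding catch (handling only ClassNotFoundException) does not cover. Passing classOf[String] and classOf[ClassLoader] matches the constructor that ExecutorClassLoader actually declares. A minimal, self-contained sketch of the pitfall, using hypothetical classes rather than anything from the patch:

    // Sketch only: Holder declares its constructor parameter type as Base.
    class Base
    class Derived extends Base
    class Holder(b: Base)

    object ReflectionPitfall {
      def main(args: Array[String]): Unit = {
        val arg: Base = new Derived
        // Looking the constructor up by the argument's runtime class fails:
        // Holder has no constructor declared as Holder(Derived).
        try {
          classOf[Holder].getConstructor(arg.getClass)
        } catch {
          case _: NoSuchMethodException => println("lookup by runtime class failed")
        }
        // Looking it up by the declared parameter type succeeds, as in the patch.
        val ctor = classOf[Holder].getConstructor(classOf[Base])
        println(ctor.newInstance(arg))
      }
    }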
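
Note on the HadoopFile.scala change: org.apache.hadoop.io.NullWritable is a singleton whose only constructor is private; callers are expected to obtain it via NullWritable.get(). keyClass.newInstance() therefore fails for inputs keyed by NullWritable, which is what the new createWritable helper works around. A small sketch of the helper's behaviour, assuming hadoop-core on the classpath (WritableDemo is an illustrative name, not part of the patch):

    import org.apache.hadoop.io.{NullWritable, Text}

    object WritableDemo {
      // Same logic as the createWritable helper added in this patch.
      def createWritable[T](clazz: Class[T]): T =
        if (clazz == classOf[NullWritable]) NullWritable.get().asInstanceOf[T]
        else clazz.newInstance()

      def main(args: Array[String]): Unit = {
        println(createWritable(classOf[Text]))         // fresh, empty Text
        println(createWritable(classOf[NullWritable])) // the shared singleton
        // classOf[NullWritable].newInstance() would throw here, because
        // NullWritable's constructor is private.
      }
    }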