Modified project structure to work with buildr

This commit is contained in:
Matei Zaharia 2010-11-13 17:18:05 -08:00
parent f8966ffc11
commit b84769a107
52 changed files with 47 additions and 7 deletions

2
.gitignore vendored
View file

@@ -8,3 +8,5 @@ third_party/libmesos.dylib
conf/java-opts
conf/spark-env.sh
conf/log4j.properties
target
reports

View file

@@ -8,4 +8,4 @@ if [ -d $RESULTS_DIR ]; then
rm -r $RESULTS_DIR
fi
mkdir -p $RESULTS_DIR
$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/build/classes -u $RESULTS_DIR -o $@
$FWDIR/run org.scalatest.tools.Runner -p $FWDIR/target/test/classes -u $RESULTS_DIR -o $@

22
buildfile Normal file
View file

@@ -0,0 +1,22 @@
# Apache Buildr build definition for Spark (introduced by this commit to
# replace the previous build layout; output goes under target/ instead of build/).
require 'buildr/scala'
# Version number for this release
VERSION_NUMBER = "0.0.0"
# Group identifier for your projects
GROUP = "spark"
# Implementation-Vendor value stamped into the jar manifest below.
COPYRIGHT = ""
# Specify Maven 2.0 remote repositories here, like this:
repositories.remote << "http://www.ibiblio.org/maven2/"
# Every bundled jar under third_party/ is put on the compile classpath.
THIRD_PARTY_JARS = Dir["third_party/**/*.jar"]
desc "The Spark project"
define "spark" do
project.version = VERSION_NUMBER
project.group = GROUP
manifest["Implementation-Vendor"] = COPYRIGHT
compile.with THIRD_PARTY_JARS
package(:jar)
# Run ScalaTest suites; :fork => true runs them in a separate JVM
# (needed so test classloading does not leak into the build JVM).
test.using :scalatest, :fork => true
end

4
run
View file

@@ -33,8 +33,8 @@ if [ -e $FWDIR/conf/java-opts ] ; then
fi
export JAVA_OPTS
# Build up classpath
CLASSPATH="$SPARK_CLASSPATH:$FWDIR/build/classes:$MESOS_CLASSPATH"
# build up classpath
CLASSPATH="$SPARK_CLASSPATH:$FWDIR/target/classes:$FWDIR/target/test/classes:$MESOS_CLASSPATH"
CLASSPATH+=:$FWDIR/conf
CLASSPATH+=:$FWDIR/third_party/mesos.jar
CLASSPATH+=:$FWDIR/third_party/asm-3.2/lib/all/asm-all-3.2.jar

View file

@@ -310,7 +310,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
else xs.reduceLeft(_ zip _ takeWhile (x => x._1 == x._2) map (_._1) mkString)
// This is jline's entry point for completion.
override def complete(_buf: String, cursor: Int, candidates: JList[String]): Int = {
override def complete(_buf: String, cursor: Int, candidates: JList[_]): Int = {
val buf = onull(_buf)
verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
DBG("complete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
@@ -321,7 +321,7 @@ class SparkCompletion(val repl: SparkInterpreter) extends SparkCompletionOutput
case Nil => None
case xs =>
// modify in place and return the position
xs foreach (candidates add _)
xs.foreach(x => candidates.asInstanceOf[JList[AnyRef]].add(x))
// update the last buffer unless this is an alternatives list
if (xs contains "") Some(p.cursor)

View file

@@ -129,7 +129,8 @@ extends InterpreterControl {
settings.classpath append addedClasspath
interpreter = new SparkInterpreter(settings, out) {
override protected def parentClassLoader = classOf[SparkInterpreterLoop].getClassLoader
override protected def parentClassLoader =
classOf[SparkInterpreterLoop].getClassLoader
}
interpreter.setContextClassLoader()
// interpreter.quietBind("settings", "spark.repl.SparkInterpreterSettings", interpreter.isettings)

View file

@@ -1,6 +1,10 @@
package spark.repl
import java.io._
import java.net.URLClassLoader
import scala.collection.mutable.ArrayBuffer
import scala.collection.JavaConversions._
import org.scalatest.FunSuite
@@ -8,9 +12,20 @@ class ReplSuite extends FunSuite {
def runInterpreter(master: String, input: String): String = {
val in = new BufferedReader(new StringReader(input + "\n"))
val out = new StringWriter()
val cl = getClass.getClassLoader
var paths = new ArrayBuffer[String]
if (cl.isInstanceOf[URLClassLoader]) {
val urlLoader = cl.asInstanceOf[URLClassLoader]
for (url <- urlLoader.getURLs) {
if (url.getProtocol == "file") {
paths += url.getFile
}
}
}
val interp = new SparkInterpreterLoop(in, new PrintWriter(out), master)
spark.repl.Main.interp = interp
interp.main(new Array[String](0))
val separator = System.getProperty("path.separator")
interp.main(Array("-classpath", paths.mkString(separator)))
spark.repl.Main.interp = null
return out.toString
}