Add standalone cluster REPL suite
This commit is contained in:
parent
f31e41c270
commit
19b0256ae4
|
@ -1,48 +1,12 @@
|
|||
package spark.repl
|
||||
|
||||
import java.io._
|
||||
import java.net.URLClassLoader
|
||||
|
||||
import scala.collection.mutable.ArrayBuffer
|
||||
import scala.collection.JavaConversions._
|
||||
import java.io.FileWriter
|
||||
|
||||
import org.scalatest.FunSuite
|
||||
|
||||
import com.google.common.io.Files
|
||||
|
||||
class ReplSuite extends FunSuite {
|
||||
def runInterpreter(master: String, input: String): String = {
|
||||
val in = new BufferedReader(new StringReader(input + "\n"))
|
||||
val out = new StringWriter()
|
||||
val cl = getClass.getClassLoader
|
||||
var paths = new ArrayBuffer[String]
|
||||
if (cl.isInstanceOf[URLClassLoader]) {
|
||||
val urlLoader = cl.asInstanceOf[URLClassLoader]
|
||||
for (url <- urlLoader.getURLs) {
|
||||
if (url.getProtocol == "file") {
|
||||
paths += url.getFile
|
||||
}
|
||||
}
|
||||
}
|
||||
val interp = new SparkILoop(in, new PrintWriter(out), master)
|
||||
spark.repl.Main.interp = interp
|
||||
val separator = System.getProperty("path.separator")
|
||||
interp.process(Array("-classpath", paths.mkString(separator)))
|
||||
if (interp != null)
|
||||
interp.closeInterpreter();
|
||||
// To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
|
||||
System.clearProperty("spark.master.port")
|
||||
return out.toString
|
||||
}
|
||||
|
||||
def assertContains(message: String, output: String) {
|
||||
assert(output contains message,
|
||||
"Interpreter output did not contain '" + message + "':\n" + output)
|
||||
}
|
||||
|
||||
def assertDoesNotContain(message: String, output: String) {
|
||||
assert(!(output contains message),
|
||||
"Interpreter output contained '" + message + "':\n" + output)
|
||||
}
|
||||
class ReplSuite extends FunSuite with ReplSuiteMixin {
|
||||
|
||||
test("simple foreach with accumulator") {
|
||||
val output = runInterpreter("local", """
|
||||
|
@ -163,4 +127,5 @@ class ReplSuite extends FunSuite {
|
|||
assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
56
repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
Normal file
56
repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
Normal file
|
@ -0,0 +1,56 @@
|
|||
package spark.repl
|
||||
|
||||
import java.io.BufferedReader
|
||||
import java.io.PrintWriter
|
||||
import java.io.StringReader
|
||||
import java.io.StringWriter
|
||||
import java.net.URLClassLoader
|
||||
|
||||
import scala.collection.mutable.ArrayBuffer
|
||||
import scala.concurrent.ExecutionContext.Implicits.global
|
||||
import scala.concurrent.future
|
||||
|
||||
import spark.deploy.master.Master
|
||||
import spark.deploy.worker.Worker
|
||||
|
||||
trait ReplSuiteMixin {
  /**
   * Launches a standalone Spark master and a single worker on the loopback
   * alias 127.0.1.2 (master port 7089) so cluster-mode REPL tests have a
   * cluster to connect to. Both daemons are started on background threads
   * via `future` because their main() methods block.
   */
  def setupStandaloneCluster() {
    future { Master.main(Array("-i", "127.0.1.2", "-p", "7089")) }
    // Crude startup coordination: give the master time to bind before the
    // worker tries to register. NOTE(review): a readiness check would be
    // more reliable than a fixed sleep — confirm none is available here.
    Thread.sleep(2000)
    future { Worker.main(Array("spark://127.0.1.2:7089", "--webui-port", "0")) }
  }

  /**
   * Runs the Spark REPL against the given master URL, feeding it `input`
   * as if typed at the prompt, and returns everything the interpreter
   * printed.
   *
   * The current classloader's file URLs are forwarded as the REPL
   * `-classpath` so compiled test classes are visible inside the
   * interpreter.
   */
  def runInterpreter(master: String, input: String): String = {
    val in = new BufferedReader(new StringReader(input + "\n"))
    val out = new StringWriter()
    // Collect file:// entries from the classloader to rebuild a classpath.
    // `val` (not `var`): the buffer itself is mutated, never reassigned.
    val paths = new ArrayBuffer[String]
    // Pattern match instead of isInstanceOf/asInstanceOf.
    getClass.getClassLoader match {
      case urlLoader: URLClassLoader =>
        for (url <- urlLoader.getURLs if url.getProtocol == "file") {
          paths += url.getFile
        }
      case _ => // non-URL classloader: proceed with an empty classpath
    }
    val interp = new SparkILoop(in, new PrintWriter(out), master)
    spark.repl.Main.interp = interp
    val separator = System.getProperty("path.separator")
    interp.process(Array("-classpath", paths.mkString(separator)))
    // `interp` is a freshly constructed val and can never be null, so the
    // original `if (interp != null)` guard was dead and has been dropped.
    interp.closeInterpreter()
    // To avoid Akka rebinding to the same port, since it doesn't unbind
    // immediately on shutdown
    System.clearProperty("spark.master.port")
    out.toString
  }

  /** Asserts that the interpreter output contains `message`. */
  def assertContains(message: String, output: String) {
    assert(output contains message,
        "Interpreter output did not contain '" + message + "':\n" + output)
  }

  /** Asserts that the interpreter output does NOT contain `message`. */
  def assertDoesNotContain(message: String, output: String) {
    assert(!(output contains message),
        "Interpreter output contained '" + message + "':\n" + output)
  }
}
|
|
@ -0,0 +1,32 @@
|
|||
package spark.repl
|
||||
|
||||
import org.scalatest.FunSuite
|
||||
|
||||
/**
 * REPL tests that run against a local standalone cluster (one master, one
 * worker) rather than a `local` scheduler. The cluster is started once, as
 * a side effect of class construction.
 */
class StandaloneClusterReplSuite extends FunSuite with ReplSuiteMixin {
  // Fix: `setupStandaloneCluster` is a side-effecting, ()-declared method;
  // invoke it with parentheses instead of relying on deprecated parenless
  // invocation.
  setupStandaloneCluster()

  test("simple collect") {
    val output = runInterpreter("spark://127.0.1.2:7089", """
      var x = 123
      val data = sc.parallelize(1 to 3).map(_ + x)
      data.take(3)
      """)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    // parallelize(1 to 3).map(_ + 123) must yield 124, 125, 126.
    assertContains("124", output)
    assertContains("125", output)
    assertContains("126", output)
  }

  test("simple foreach with accumulator") {
    val output = runInterpreter("spark://127.0.1.2:7089", """
      val accum = sc.accumulator(0)
      sc.parallelize(1 to 10).foreach(x => accum += x)
      accum.value
      """)
    assertDoesNotContain("error:", output)
    assertDoesNotContain("Exception", output)
    // Sum of 1..10 accumulated on the executors.
    assertContains("res1: Int = 55", output)
  }

}
|
Loading…
Reference in a new issue