A few more fixes to tests broken during the merge

Prashant Sharma 2013-09-10 10:57:47 +05:30
parent 4106ae9fbf
commit 6fcfefcb27
2 changed files with 3 additions and 53 deletions


@@ -1,50 +0,0 @@
package org.apache.spark.repl

import java.io.BufferedReader
import java.io.PrintWriter
import java.io.StringReader
import java.io.StringWriter
import java.net.URLClassLoader

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.future

trait ReplSuiteMixin {
  def runInterpreter(master: String, input: String): String = {
    val in = new BufferedReader(new StringReader(input + "\n"))
    val out = new StringWriter()
    val cl = getClass.getClassLoader
    var paths = new ArrayBuffer[String]
    if (cl.isInstanceOf[URLClassLoader]) {
      val urlLoader = cl.asInstanceOf[URLClassLoader]
      for (url <- urlLoader.getURLs) {
        if (url.getProtocol == "file") {
          paths += url.getFile
        }
      }
    }

    val interp = new SparkILoop(in, new PrintWriter(out), master)
    spark.repl.Main.interp = interp
    val separator = System.getProperty("path.separator")
    interp.process(Array("-classpath", paths.mkString(separator)))
    if (interp != null) {
      interp.closeInterpreter()
    }
    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
    System.clearProperty("spark.hostPort")
    return out.toString
  }

  def assertContains(message: String, output: String) {
    assert(output contains message,
      "Interpreter output did not contain '" + message + "':\n" + output)
  }

  def assertDoesNotContain(message: String, output: String) {
    assert(!(output contains message),
      "Interpreter output contained '" + message + "':\n" + output)
  }
}
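
The file removed above was a ScalaTest mixin providing REPL test helpers. For context, a test built on it would have looked roughly like the sketch below; the suite name, script, and expected output are illustrative assumptions, not taken from this commit.

// Sketch only: hypothetical use of the removed mixin's helpers.
import org.scalatest.FunSuite

class ExampleReplSuite extends FunSuite with ReplSuiteMixin {
  test("external vars are captured by closures") {
    // Run a small script in a local-mode interpreter and capture its transcript.
    val output = runInterpreter("local",
      """
        |var v = 7
        |sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_ + _)
      """.stripMargin)
    // Assertions run against the captured interpreter output.
    assertDoesNotContain("error:", output)
    assertContains("res0: Int = 70", output)
  }
}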


@@ -29,9 +29,9 @@ import org.scalatest.BeforeAndAfter
 import com.google.common.io.Files
-import spark.streaming.StreamingContext.toPairDStreamFunctions
-import spark.streaming.dstream.FileInputDStream
-import spark.streaming.util.ManualClock
+import org.apache.spark.streaming.StreamingContext.toPairDStreamFunctions
+import org.apache.spark.streaming.dstream.FileInputDStream
+import org.apache.spark.streaming.util.ManualClock
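
The second hunk only moves the streaming test imports under the renamed org.apache.spark package. As a rough illustration of code written against the new layout (the object name and body are hypothetical, not part of this diff):

// Minimal sketch, assuming the post-rename package layout; names are hypothetical.
import org.apache.spark.streaming.{Seconds, StreamingContext}

object StreamingImportSketch {
  def main(args: Array[String]): Unit = {
    // Streaming tests of this era select the deterministic clock through a
    // system property, which now also points at the renamed package.
    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
    val ssc = new StreamingContext("local[2]", "StreamingImportSketch", Seconds(1))
    ssc.stop()
  }
}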