syntax errors

Denny 2012-07-18 12:18:00 -07:00
parent 1d98884548
commit e4dbaf653f
11 changed files with 11 additions and 11 deletions
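
All 11 files contain the same one-character fix: the List(...) passed as the final argument to the SparkContext constructor was never closed, leaving the call with unbalanced parentheses so none of the examples compiled. A minimal sketch of the pattern, with "AppName" as a placeholder for each example's name (the actual value name is spark or sc depending on the file):

  // Before: the constructor call's opening '(' is never matched,
  // so the Scala compiler reports a syntax error.
  val sc = new SparkContext(args(0), "AppName", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))

  // After: one added ')' balances the call.
  val sc = new SparkContext(args(0), "AppName", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))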

@@ -9,7 +9,7 @@ object BroadcastTest {
       System.exit(1)
     }
-    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val slices = if (args.length > 1) args(1).toInt else 2
     val num = if (args.length > 2) args(2).toInt else 1000000

@@ -9,7 +9,7 @@ object ExceptionHandlingTest {
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "ExceptionHandlingTest", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "ExceptionHandlingTest", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     sc.parallelize(0 until sc.defaultParallelism).foreach { i =>
       if (Math.random > 0.75)
         throw new Exception("Testing exception handling")

@@ -16,7 +16,7 @@ object GroupByTest {
     var valSize = if (args.length > 3) args(3).toInt else 1000
     var numReducers = if (args.length > 4) args(4).toInt else numMappers
-    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val pairs1 = sc.parallelize(0 until numMappers, numMappers).flatMap { p =>
       val ranGen = new Random

@@ -4,7 +4,7 @@ import spark._
 object HdfsTest {
   def main(args: Array[String]) {
-    val sc = new SparkContext(args(0), "HdfsTest", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "HdfsTest", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val file = sc.textFile(args(1))
     val mapped = file.map(s => s.length).cache()
     for (iter <- 1 to 10) {

@@ -9,7 +9,7 @@ object MultiBroadcastTest {
       System.exit(1)
     }
-    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val spark = new SparkContext(args(0), "Broadcast Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val slices = if (args.length > 1) args(1).toInt else 2
     val num = if (args.length > 2) args(2).toInt else 1000000

@@ -18,7 +18,7 @@ object SimpleSkewedGroupByTest {
     var numReducers = if (args.length > 4) args(4).toInt else numMappers
     var ratio = if (args.length > 5) args(5).toInt else 5.0
-    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val pairs1 = sc.parallelize(0 until numMappers, numMappers).flatMap { p =>
       val ranGen = new Random

@@ -16,7 +16,7 @@ object SkewedGroupByTest {
     var valSize = if (args.length > 3) args(3).toInt else 1000
     var numReducers = if (args.length > 4) args(4).toInt else numMappers
-    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "GroupBy Test", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val pairs1 = sc.parallelize(0 until numMappers, numMappers).flatMap { p =>
       val ranGen = new Random

@@ -112,7 +112,7 @@ object SparkALS {
       }
     }
     printf("Running with M=%d, U=%d, F=%d, iters=%d\n", M, U, F, ITERATIONS);
-    val spark = new SparkContext(host, "SparkALS", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val spark = new SparkContext(host, "SparkALS", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val R = generateR()

@@ -29,7 +29,7 @@ object SparkHdfsLR {
       System.err.println("Usage: SparkHdfsLR <master> <file> <iters>")
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "SparkHdfsLR", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "SparkHdfsLR", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val lines = sc.textFile(args(1))
     val points = lines.map(parsePoint _).cache()
     val ITERATIONS = args(2).toInt

@@ -37,7 +37,7 @@ object SparkKMeans {
       System.err.println("Usage: SparkLocalKMeans <master> <file> <k> <convergeDist>")
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "SparkLocalKMeans", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "SparkLocalKMeans", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val lines = sc.textFile(args(1))
     val data = lines.map(parseVector _).cache()
     val K = args(2).toInt

@@ -28,7 +28,7 @@ object SparkLR {
       System.err.println("Usage: SparkLR <host> [<slices>]")
       System.exit(1)
     }
-    val sc = new SparkContext(args(0), "SparkLR", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR"))
+    val sc = new SparkContext(args(0), "SparkLR", System.getenv("SPARK_HOME"), List(System.getenv("EXAMPLES_JAR")))
     val numSlices = if (args.length > 1) args(1).toInt else 2
     val data = generateData