From 5c886194e458c64fcf24066af351bde47dd8bf12 Mon Sep 17 00:00:00 2001
From: Christopher Nguyen
Date: Sun, 16 Jun 2013 01:23:48 -0700
Subject: [PATCH] Move zero-length partition testing from JavaAPISuite.java to
 PartitioningSuite.scala

---
 core/src/test/scala/spark/JavaAPISuite.java  | 22 ----------------------
 .../test/scala/spark/PartitioningSuite.scala | 21 +++++++++++++++++++--
 2 files changed, 19 insertions(+), 24 deletions(-)

diff --git a/core/src/test/scala/spark/JavaAPISuite.java b/core/src/test/scala/spark/JavaAPISuite.java
index 3190a43e73..93bb69b41c 100644
--- a/core/src/test/scala/spark/JavaAPISuite.java
+++ b/core/src/test/scala/spark/JavaAPISuite.java
@@ -314,28 +314,6 @@ public class JavaAPISuite implements Serializable {
     List<Double> take = rdd.take(5);
   }
 
-  @Test
-  public void zeroLengthPartitions() {
-    // Create RDD with some consecutive empty partitions (including the "first" one)
-    JavaDoubleRDD rdd = sc
-        .parallelizeDoubles(Arrays.asList(-1.0, -1.0, -1.0, -1.0, 2.0, 4.0, -1.0, -1.0), 8)
-        .filter(new Function<Double, Boolean>() {
-          @Override
-          public Boolean call(Double x) {
-            return x > 0.0;
-          }
-        });
-
-    // Run the partitions, including the consecutive empty ones, through StatCounter
-    StatCounter stats = rdd.stats();
-    Assert.assertEquals(6.0, stats.sum(), 0.01);
-    Assert.assertEquals(6.0/2, rdd.mean(), 0.01);
-    Assert.assertEquals(1.0, rdd.variance(), 0.01);
-    Assert.assertEquals(1.0, rdd.stdev(), 0.01);
-
-    // Add other tests here for classes that should be able to handle empty partitions correctly
-  }
-
   @Test
   public void map() {
     JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5));
diff --git a/core/src/test/scala/spark/PartitioningSuite.scala b/core/src/test/scala/spark/PartitioningSuite.scala
index 60db759c25..e5745c81b3 100644
--- a/core/src/test/scala/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/spark/PartitioningSuite.scala
@@ -1,10 +1,10 @@
 package spark
 
 import org.scalatest.FunSuite
-
 import scala.collection.mutable.ArrayBuffer
-
 import SparkContext._
+import spark.util.StatCounter
+import scala.math._
 
 class PartitioningSuite extends FunSuite with LocalSparkContext {
 
@@ -120,4 +120,21 @@ class PartitioningSuite extends FunSuite with LocalSparkContext {
     assert(intercept[SparkException]{ arrPairs.reduceByKeyLocally(_ + _) }.getMessage.contains("array"))
     assert(intercept[SparkException]{ arrPairs.reduceByKey(_ + _) }.getMessage.contains("array"))
   }
+
+  test("Zero-length partitions should be correctly handled") {
+    // Create RDD with some consecutive empty partitions (including the "first" one)
+    sc = new SparkContext("local", "test")
+    val rdd: RDD[Double] = sc
+      .parallelize(Array(-1.0, -1.0, -1.0, -1.0, 2.0, 4.0, -1.0, -1.0), 8)
+      .filter(_ >= 0.0)
+
+    // Run the partitions, including the consecutive empty ones, through StatCounter
+    val stats: StatCounter = rdd.stats()
+    assert(abs(6.0 - stats.sum) < 0.01)
+    assert(abs(6.0/2 - rdd.mean) < 0.01)
+    assert(abs(1.0 - rdd.variance) < 0.01)
+    assert(abs(1.0 - rdd.stdev) < 0.01)
+
+    // Add other tests here for classes that should be able to handle empty partitions correctly
+  }
 }
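
The case this test exercises: rdd.stats() builds one StatCounter per partition
and then merges them, so merging a zero-count counter must leave the running
mean and variance untouched. Below is a minimal, self-contained Scala sketch of
that merge logic; it is a simplified stand-in for illustration, not Spark's
actual spark.util.StatCounter, and it mirrors the eight-partition layout used
in the new test:

    // Simplified stand-in for per-partition stat merging (illustrative only,
    // not Spark's spark.util.StatCounter).
    object ZeroLengthPartitionSketch {
      final case class Stats(n: Long, mean: Double, m2: Double) {
        // Pairwise merge of running moments; must be a no-op when other.n == 0.
        def merge(other: Stats): Stats =
          if (other.n == 0) this
          else if (n == 0) other
          else {
            val delta = other.mean - mean
            val total = n + other.n
            Stats(total,
                  mean + delta * other.n / total,
                  m2 + other.m2 + delta * delta * n * other.n / total)
          }
        def variance: Double = if (n == 0) Double.NaN else m2 / n
      }

      // Welford's streaming update over one partition's values.
      def stats(xs: Seq[Double]): Stats =
        xs.foldLeft(Stats(0, 0.0, 0.0)) { (s, x) =>
          val n = s.n + 1
          val delta = x - s.mean
          val mean = s.mean + delta / n
          Stats(n, mean, s.m2 + delta * (x - mean))
        }

      def main(args: Array[String]): Unit = {
        // Eight "partitions", several of them empty (including the first),
        // mirroring parallelize(..., 8).filter(_ >= 0.0) in the test.
        val partitions = Seq(Seq.empty[Double], Seq(), Seq(), Seq(),
                             Seq(2.0), Seq(4.0), Seq(), Seq())
        val merged = partitions.map(stats).reduce(_ merge _)
        assert(math.abs(merged.mean - 3.0) < 0.01)
        assert(math.abs(merged.variance - 1.0) < 0.01)
      }
    }

Because the empty partitions come first, a merge that mishandles n == 0 (for
example, by dividing by the combined count unconditionally) surfaces
immediately as NaN or a skewed mean, which is exactly what the moved test
guards against.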