Add test to verify that an RDD is computed even if the block manager has insufficient memory
This commit is contained in:
Shivaram Venkataraman 2012-10-12 14:14:57 -07:00
parent 2cf40c5fd5
commit 8577523f37

View file

@@ -27,6 +27,7 @@ class DistributedSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
sc = null
}
System.clearProperty("spark.reducer.maxMbInFlight")
System.clearProperty("spark.storage.memoryFraction")
// To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
System.clearProperty("spark.master.port")
}
@@ -156,4 +157,13 @@ class DistributedSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
assert(data.count() === 1000)
assert(data.count() === 1000)
}
test("compute without caching with low memory") {
System.setProperty("spark.storage.memoryFraction", "0.0001")
sc = new SparkContext(clusterUrl, "test")
val data = sc.parallelize(1 to 4000000, 2).persist(StorageLevel.MEMORY_ONLY)
assert(data.count() === 4000000)
assert(data.count() === 4000000)
assert(data.count() === 4000000)
}
}