Use maxMemory to better estimate memory available for BlockManager cache

Shivaram Venkataraman 2012-08-02 12:05:05 -07:00
parent b980eabd86
commit 6790908b11


@@ -580,6 +580,6 @@ class BlockManager(val master: BlockManagerMaster, val serializer: Serializer, m
 object BlockManager {
   def getMaxMemoryFromSystemProperties(): Long = {
     val memoryFraction = System.getProperty("spark.storage.memoryFraction", "0.66").toDouble
-    (Runtime.getRuntime.totalMemory * memoryFraction).toLong
+    (Runtime.getRuntime.maxMemory * memoryFraction).toLong
   }
 }
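
The reason for the switch is that Runtime.totalMemory only reports the heap the JVM has allocated so far, which early in an application's life can be well below the -Xmx ceiling, while Runtime.maxMemory reports that ceiling itself and so gives a more stable estimate of the memory the cache can eventually use. The following standalone Scala sketch (not the Spark source; the object name MaxMemoryEstimate and the main method are illustrative only) mirrors the patched helper and prints both values so the difference is visible:

// Minimal sketch contrasting totalMemory and maxMemory; the property name and
// default fraction come from the diff above, everything else is illustrative.
object MaxMemoryEstimate {
  // Mirrors the patched helper: a fraction of the heap reserved for the block cache.
  def getMaxMemoryFromSystemProperties(): Long = {
    val memoryFraction = System.getProperty("spark.storage.memoryFraction", "0.66").toDouble
    (Runtime.getRuntime.maxMemory * memoryFraction).toLong
  }

  def main(args: Array[String]): Unit = {
    val rt = Runtime.getRuntime
    val mb = 1024 * 1024
    println(s"totalMemory (heap allocated so far): ${rt.totalMemory / mb} MB")
    println(s"maxMemory   (heap ceiling, -Xmx):    ${rt.maxMemory / mb} MB")
    println(s"estimated cache budget:              ${getMaxMemoryFromSystemProperties() / mb} MB")
  }
}

Run with, for example, scala -J-Xmx2g MaxMemoryEstimate to see totalMemory start far below the 2 GB cap while maxMemory and the derived cache budget stay fixed.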