[SPARK-12187] *MemoryPool classes should not be fully public

This patch tightens them to `private[memory]`.

Author: Andrew Or <andrew@databricks.com>

Closes #10182 from andrewor14/memory-visibility.
This commit is contained in:
Andrew Or 2015-12-08 14:34:15 -08:00 committed by Josh Rosen
parent 2ff17bcfb1
commit 9494521695
4 changed files with 4 additions and 4 deletions

View file

@@ -39,7 +39,7 @@ import org.apache.spark.Logging
* @param lock a [[MemoryManager]] instance to synchronize on
* @param poolName a human-readable name for this pool, for use in log messages
*/
class ExecutionMemoryPool(
private[memory] class ExecutionMemoryPool(
lock: Object,
poolName: String
) extends MemoryPool(lock) with Logging {

View file

@@ -27,7 +27,7 @@ import javax.annotation.concurrent.GuardedBy
* to `Object` to avoid programming errors, since this object should only be used for
* synchronization purposes.
*/
abstract class MemoryPool(lock: Object) {
private[memory] abstract class MemoryPool(lock: Object) {
@GuardedBy("lock")
private[this] var _poolSize: Long = 0

View file

@@ -31,7 +31,7 @@ import org.apache.spark.storage.{MemoryStore, BlockStatus, BlockId}
*
* @param lock a [[MemoryManager]] instance to synchronize on
*/
class StorageMemoryPool(lock: Object) extends MemoryPool(lock) with Logging {
private[memory] class StorageMemoryPool(lock: Object) extends MemoryPool(lock) with Logging {
@GuardedBy("lock")
private[this] var _memoryUsed: Long = 0L

View file

@@ -49,7 +49,7 @@ import org.apache.spark.storage.{BlockStatus, BlockId}
private[spark] class UnifiedMemoryManager private[memory] (
conf: SparkConf,
val maxMemory: Long,
private val storageRegionSize: Long,
storageRegionSize: Long,
numCores: Int)
extends MemoryManager(
conf,