Slight refactoring to SparkContext functions
This commit is contained in:
parent
cfcd77b5da
commit
b10199413a
|
@ -581,10 +581,17 @@ class SparkContext(
|
|||
/**
 * Return the top-level pools registered with the fair scheduler.
 *
 * TODO(xiajunluan): We should take nested pools into account
 */
def getAllPools: ArrayBuffer[Schedulable] =
  taskScheduler.rootPool.schedulableQueue
|
||||
|
||||
/**
 * Return the pool associated with the given name, if one exists.
 *
 * @param pool name of the pool to look up
 * @return `Some(pool)` when registered with the root pool, `None` otherwise
 */
def getPoolForName(pool: String): Option[Schedulable] =
  taskScheduler.rootPool.schedulableNameToSchedulable.get(pool)
|
||||
|
||||
/**
|
||||
* Return current scheduling mode
|
||||
*/
|
||||
|
@ -592,10 +599,6 @@ class SparkContext(
|
|||
taskScheduler.schedulingMode
|
||||
}
|
||||
|
||||
/** Return the mapping from pool name to pool maintained by the root pool. */
def getPoolNameToPool: HashMap[String, Schedulable] =
  taskScheduler.rootPool.schedulableNameToSchedulable
|
||||
|
||||
/**
|
||||
* Clear the job's list of files added by `addFile` so that they do not get downloaded to
|
||||
* any new nodes.
|
||||
|
|
|
@ -38,7 +38,7 @@ private[spark] class FIFOSource() extends PoolSource {
|
|||
*/
|
||||
/**
 * Pool source that exposes every pool registered with the given
 * SparkContext's fair scheduler.
 */
private[spark] class FairSource(sc: SparkContext) extends PoolSource {
  def getPools: Seq[Schedulable] = sc.getAllPools.toSeq
}
|
||||
|
||||
|
@ -48,7 +48,7 @@ private[spark] class FairSource(sc: SparkContext) extends PoolSource {
|
|||
/**
 * Pool source that exposes exactly one named pool from the given
 * SparkContext's scheduler.
 */
private[spark] class PoolDetailSource(sc: SparkContext, poolName: String) extends PoolSource {
  def getPools: Seq[Schedulable] = {
    // NOTE(review): `.get` throws NoSuchElementException when no pool with
    // `poolName` exists — presumably callers guarantee it does; verify upstream.
    val pools = HashSet[Schedulable]()
    pools += sc.getPoolForName(poolName).get
    pools.toSeq
  }
}
|
||||
|
|
Loading…
Reference in a new issue