Don't throw an error in the block manager when a block is cached on the master due to
a locally computed operation.

Conflicts: core/src/main/scala/spark/storage/BlockManagerMaster.scala
This commit is contained in:
parent
f63a40fd99
commit
e782187b4a
|
@ -243,6 +243,12 @@ private[spark] class BlockManagerMasterActor(val isLocal: Boolean) extends Actor
|
|||
val startTimeMs = System.currentTimeMillis()
|
||||
val tmp = " " + blockManagerId + " " + blockId + " "
|
||||
|
||||
if (!blockManagerInfo.contains(blockManagerId)) {
|
||||
// Can happen if this is from a locally cached partition on the master
|
||||
sender ! true
|
||||
return
|
||||
}
|
||||
|
||||
if (blockId == null) {
|
||||
blockManagerInfo(blockManagerId).updateLastSeenMs()
|
||||
logDebug("Got in heartBeat 1" + tmp + " used " + Utils.getUsedTimeMs(startTimeMs))
|
||||
|
|
Loading…
Reference in a new issue