adding spark.streaming.blockInterval property
This commit is contained in:
parent
8ac9efba5a
commit
ab0f834dbb
|
@ -253,6 +253,13 @@ Apart from these, the following properties are also available, and may be useful
|
|||
applications). Note that any RDD that persists in memory for more than this duration will be cleared as well.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>spark.streaming.blockInterval</td>
|
||||
<td>200</td>
|
||||
<td>
|
||||
Interval (milliseconds) at which objects received from network receivers are batched together into blocks of data before being stored in Spark.
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
</table>
|
||||
|
||||
|
|
|
@ -198,7 +198,7 @@ abstract class NetworkReceiver[T: ClassManifest]() extends Serializable with Log
|
|||
case class Block(id: String, iterator: Iterator[T], metadata: Any = null)
|
||||
|
||||
val clock = new SystemClock()
|
||||
val blockInterval = 200L
|
||||
val blockInterval = System.getProperty("spark.streaming.blockInterval", "200").toLong
|
||||
val blockIntervalTimer = new RecurringTimer(clock, blockInterval, updateCurrentBuffer)
|
||||
val blockStorageLevel = storageLevel
|
||||
val blocksForPushing = new ArrayBlockingQueue[Block](1000)
|
||||
|
|
Loading…
Reference in a new issue