Renamed stream package to streaming

This commit is contained in:
Matei Zaharia 2012-07-29 13:35:22 -07:00
parent e2e71a1fb5
commit fcee4153b9
47 changed files with 51 additions and 51 deletions

View file

@@ -1,3 +1,3 @@
#!/bin/bash
./run spark.stream.SentenceGenerator localhost 7078 sentences.txt 1
./run spark.streaming.SentenceGenerator localhost 7078 sentences.txt 1

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
case class BlockID(sRds: String, sInterval: Interval, sPartition: Int) {
override def toString : String = (

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.Logging

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.Logging

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import scala.collection.mutable.Map

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
case class Interval (val beginTime: Time, val endTime: Time) {

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
class Job(val time: Time, func: () => _) {
val id = Job.getNewId()

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkEnv
import spark.Logging

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.{Logging, SparkEnv}
import java.util.concurrent.Executors

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.Logging
import spark.storage.StorageLevel

View file

@@ -1,6 +1,6 @@
package spark.stream
package spark.streaming
import spark.stream.SparkStreamContext._
import spark.streaming.SparkStreamContext._
import spark.RDD
import spark.BlockRDD
@@ -41,7 +41,7 @@ extends Logging with Serializable {
@transient private val generatedRDDs = new HashMap[Time, RDD[T]] ()
// Variable to be set to the first time seen by the RDS (effective time zero)
private[stream] var zeroTime: Time = null
private[streaming] var zeroTime: Time = null
// Variable to specify storage level
private var storageLevel: StorageLevel = StorageLevel.NONE

View file

@@ -1,6 +1,6 @@
package spark.stream
package spark.streaming
import spark.stream.SparkStreamContext._
import spark.streaming.SparkStreamContext._
import spark.RDD
import spark.UnionRDD

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkEnv
import spark.Logging

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import scala.util.Random
import scala.io.Source

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import java.net.{Socket, ServerSocket}
import java.io.{ByteArrayOutputStream, DataOutputStream, DataInputStream, BufferedInputStream}

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import scala.util.Random
import scala.io.Source

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import spark.SparkEnv

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import scala.util.Random
import scala.io.Source

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import scala.util.Random
import scala.io.Source

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.Logging

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.Logging
import scala.collection.mutable.{ArrayBuffer, HashMap}

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.Logging

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark._
import spark.storage._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark._
import spark.storage._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
abstract case class Time {

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,6 +1,6 @@
package spark.stream
package spark.streaming
import spark.stream.SparkStreamContext._
import spark.streaming.SparkStreamContext._
import spark.RDD
import spark.UnionRDD

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import spark.SparkContext
import SparkContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._
import spark.SparkContext

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._

View file

@@ -1,4 +1,4 @@
package spark.stream
package spark.streaming
import SparkStreamContext._