[SPARK-2331] SparkContext.emptyRDD should return RDD[T] not EmptyRDD[T]

Author: Reynold Xin <rxin@databricks.com>

Closes #10394 from rxin/SPARK-2331.
Authored by Reynold Xin on 2015-12-21 14:07:48 -08:00; committed by Andrew Or
parent b0849b8aea
commit a820ca19de
2 changed files with 4 additions and 1 deletion

core/src/main/scala/org/apache/spark/SparkContext.scala

@@ -1248,7 +1248,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient
   }
   /** Get an RDD that has no partitions or elements. */
-  def emptyRDD[T: ClassTag]: EmptyRDD[T] = new EmptyRDD[T](this)
+  def emptyRDD[T: ClassTag]: RDD[T] = new EmptyRDD[T](this)
   // Methods for creating shared variables
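Returning the supertype keeps EmptyRDD an implementation detail and lets the result compose cleanly with other RDD[T] values; with the old EmptyRDD[T] return type, a fold that uses emptyRDD as its zero value infers the accumulator type as EmptyRDD and fails to compile. A minimal sketch of the new usage, assuming a local SparkContext (the example app and RDD contents are illustrative and not part of this patch):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object EmptyRDDExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("empty-rdd-example").setMaster("local[*]"))

    // The static type is now the public abstraction RDD[Int];
    // the EmptyRDD class never appears in user code.
    val empty: RDD[Int] = sc.emptyRDD[Int]

    // Typical use: a neutral zero value when combining an arbitrary list of RDDs.
    // With the old EmptyRDD[T] return type, the accumulator would be inferred as
    // EmptyRDD[Int] and this fold would not compile.
    val parts: Seq[RDD[Int]] = Seq(sc.parallelize(1 to 3), sc.parallelize(4 to 6))
    val combined = parts.foldLeft(sc.emptyRDD[Int])(_ union _)

    println(empty.count())     // 0
    println(combined.count())  // 6

    sc.stop()
  }
}
```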

project/MimaExcludes.scala

@@ -34,6 +34,9 @@ import com.typesafe.tools.mima.core.ProblemFilters._
 object MimaExcludes {
   def excludes(version: String) = version match {
     case v if v.startsWith("2.0") =>
+      Seq(
+        ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.SparkContext.emptyRDD")
+      ) ++
       // When 1.6 is officially released, update this exclusion list.
       Seq(
         MimaBuild.excludeSparkPackage("deploy"),
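The exclusion above is needed because changing a method's declared result type, even to a supertype, changes the JVM method descriptor: emptyRDD now returns RDD instead of EmptyRDD in bytecode, so the change is source compatible but not binary compatible. MiMa (the Migration Manager that the Spark build runs against the previous release) would otherwise report this IncompatibleResultTypeProblem; the new entry records that the break is intentional for the 2.0 line.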