[SPARK-2924] remove default args to overloaded methods

Scala 2.11 rejects overloads where more than one alternative defines default arguments. Split them into separate methods instead.
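
Concretely, the restriction that bites here: Scala rejects a set of overloads when more than one alternative defines a default argument, and 2.11 tightened the check to cover cases 2.10 missed (for example, when one alternative is generic, or when the alternatives are auxiliary constructors). A minimal sketch of the problem and of the split-method fix used throughout this commit, on a hypothetical Routes object rather than code from the commit:

object Routes {
  // Rejected by scalac 2.11 ("multiple overloaded alternatives of method
  // handler define default arguments"):
  //
  //   def handler(path: String, basePath: String = ""): String = ...
  //   def handler[T](path: T, basePath: String = ""): String = ...

  // The fix: drop the default and add an explicit forwarding overload.
  def handler[T](path: T, basePath: String): String = basePath + path.toString
  def handler[T](path: T): String = handler(path, "")
}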

Author: Anand Avati <avati@redhat.com>

Closes #1704 from avati/SPARK-1812-default-args and squashes the following commits:

3e3924a [Anand Avati] SPARK-1812: Add Mima excludes for the broken ABI
901dfc7 [Anand Avati] SPARK-1812: core - Fix overloaded methods with default arguments
07f00af [Anand Avati] SPARK-1812: streaming - Fix overloaded methods with default arguments
Anand Avati 2014-08-15 08:53:52 -07:00 committed by Patrick Wendell
parent fba8ec39cc
commit 7589c39d39
3 changed files with 11 additions and 2 deletions


@@ -93,7 +93,7 @@ private[spark] object JettyUtils extends Logging {
   def createServletHandler(
       path: String,
       servlet: HttpServlet,
-      basePath: String = ""): ServletContextHandler = {
+      basePath: String): ServletContextHandler = {
     val prefixedPath = attachPrefix(basePath, path)
     val contextHandler = new ServletContextHandler
     val holder = new ServletHolder(servlet)
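
With the default gone from this alternative, a call that previously relied on it must spell out the base path. An illustrative call site, not taken from the commit (JettyUtils is private[spark], so the sketch assumes it lives in a subpackage of org.apache.spark):

package org.apache.spark.examples

import javax.servlet.http.HttpServlet
import org.apache.spark.ui.JettyUtils

object ServletHandlerSketch {
  private val servlet = new HttpServlet {}  // stand-in servlet for the sketch
  // Previously: createServletHandler("/jobs", servlet); the empty base
  // path is now explicit.
  val handler = JettyUtils.createServletHandler("/jobs", servlet, basePath = "")
}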


@@ -117,6 +117,9 @@ object MimaExcludes {
         ) ++
         Seq( // new Vector methods in MLlib (binary compatible assuming users do not implement Vector)
           ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.mllib.linalg.Vector.copy")
+        ) ++
+        Seq( // Scala 2.11 compatibility fix
+          ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.StreamingContext.<init>$default$2")
         )
       case v if v.startsWith("1.0") =>
         Seq(
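
The excluded member is the compiler-generated default getter: for a constructor parameter with a default value, scalac emits a synthetic method named <init>$default$N on the companion object, so removing the default removes that method from the binary API and MiMa flags it. A sketch of the encoding, using a hypothetical class:

// Hypothetical class illustrating how scalac encodes constructor defaults.
class Checkpointed(val path: String, val conf: Int = 0)

// scalac compiles the default for the second parameter roughly as a
// synthetic getter on the companion object:
//
//   object Checkpointed {
//     def <init>$default$2: Int = 0   // bytecode name: $lessinit$greater$default$2
//   }
//
// Dropping "= 0" deletes that method, which MiMa reports as a
// MissingMethodProblem unless excluded, as done above for
// org.apache.spark.streaming.StreamingContext.<init>$default$2.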


@@ -98,9 +98,15 @@ class StreamingContext private[streaming] (
    * @param hadoopConf Optional, configuration object if necessary for reading from
    *                   HDFS compatible filesystems
    */
-  def this(path: String, hadoopConf: Configuration = new Configuration) =
+  def this(path: String, hadoopConf: Configuration) =
     this(null, CheckpointReader.read(path, new SparkConf(), hadoopConf).get, null)
 
+  /**
+   * Recreate a StreamingContext from a checkpoint file.
+   * @param path Path to the directory that was specified as the checkpoint directory
+   */
+  def this(path: String) = this(path, new Configuration)
+
   if (sc_ == null && cp_ == null) {
     throw new Exception("Spark Streaming cannot be initialized with " +
       "both SparkContext and checkpoint as null")