[SPARK-33048][BUILD] Fix SparkBuild.scala to recognize build settings for Scala 2.13
### What changes were proposed in this pull request?

This PR fixes `SparkBuild.scala` to recognize build settings for Scala 2.13.

In `SparkBuild.scala`, the variable `scalaBinaryVersion` is hardcoded as `2.12`, so the environment variable `SPARK_SCALA_VERSION` is also set to `2.12`. This causes some test suites (e.g. `SparkSubmitSuite`) to fail:

```
===== TEST OUTPUT FOR o.a.s.deploy.SparkSubmitSuite: 'user classpath first in driver' =====
20/10/02 08:55:30.234 redirect stderr for command /home/kou/work/oss/spark-scala-2.13/bin/spark-submit INFO Utils: Error: Could not find or load main class org.apache.spark.launcher.Main
20/10/02 08:55:30.235 redirect stderr for command /home/kou/work/oss/spark-scala-2.13/bin/spark-submit INFO Utils: /home/kou/work/oss/spark-scala-2.13/bin/spark-class: line 96: CMD: bad array subscript
```

The error occurs because the environment variables `SPARK_JARS_DIR` and `LAUNCH_CLASSPATH` are defined in `bin/spark-class` in terms of `SPARK_SCALA_VERSION`:

```
SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
```

### Why are the changes needed?

To build for Scala 2.13 successfully.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Tests for the `core` module finish successfully:

```
build/sbt -Pscala-2.13 clean "core/test"
```

Closes #29927 from sarutak/fix-sparkbuild-for-scala-2.13.

Authored-by: Kousuke Saruta <sarutak@oss.nttdata.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
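To see the failure mode described above concretely, the `SPARK_JARS_DIR` computation from `bin/spark-class` can be mimicked outside the build. The following is a hypothetical standalone sketch (the object name and defaults are illustrative, not Spark code):

```scala
import java.nio.file.{Files, Paths}

// Hypothetical sketch reproducing the SPARK_JARS_DIR computation from
// bin/spark-class. If the build exports a hardcoded SPARK_SCALA_VERSION=2.12
// while the assembly was built for Scala 2.13, the directory does not exist.
object SparkJarsDirCheck {
  def main(args: Array[String]): Unit = {
    val sparkHome = sys.env.getOrElse("SPARK_HOME", ".")
    val scalaVersion = sys.env.getOrElse("SPARK_SCALA_VERSION", "2.12")
    val jarsDir = Paths.get(s"$sparkHome/assembly/target/scala-$scalaVersion/jars")
    println(s"$jarsDir exists: ${Files.exists(jarsDir)}")
  }
}
```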
commit f7ba95264d (parent b205be5ff6)
`SparkBuild.scala`:

```diff
@@ -94,21 +94,6 @@ object SparkBuild extends PomBuild {
       case Some(v) =>
         v.split("(\\s+|,)").filterNot(_.isEmpty).map(_.trim.replaceAll("-P", "")).toSeq
     }
-
-    // TODO: revisit for Scala 2.13 support
-    /*
-    Option(System.getProperty("scala.version"))
-      .filter(_.startsWith("2.11"))
-      .foreach { versionString =>
-        System.setProperty("scala-2.11", "true")
-      }
-    if (System.getProperty("scala-2.11") == "") {
-      // To activate scala-2.10 profile, replace empty property value to non-empty value
-      // in the same way as Maven which handles -Dname as -Dname=true before executes build process.
-      // see: https://github.com/apache/maven/blob/maven-3.0.4/maven-embedder/src/main/java/org/apache/maven/cli/MavenCli.java#L1082
-      System.setProperty("scala-2.11", "true")
-    }
-    */
     profiles
   }
```
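For context, the profile-parsing line kept by this hunk turns a Maven-style profile string into a plain sequence of profile names. A minimal standalone sketch of what it evaluates to:

```scala
// Splits on whitespace or commas, drops empties, strips the "-P" prefix.
val v = "-Pscala-2.13 -Phive,-Pyarn"
val profiles = v.split("(\\s+|,)").filterNot(_.isEmpty).map(_.trim.replaceAll("-P", "")).toSeq
// profiles == Seq("scala-2.13", "hive", "yarn")
```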
```diff
@@ -965,17 +950,6 @@ object CopyDependencies {
 
 object TestSettings {
   import BuildCommons._
-
-  // TODO revisit for Scala 2.13 support
-  private val scalaBinaryVersion = "2.12"
-  /*
-    if (System.getProperty("scala-2.11") == "true") {
-      "2.11"
-    } else {
-      "2.12"
-    }
-  */
-
   private val defaultExcludedTags = Seq("org.apache.spark.tags.ChromeUITest")
 
   lazy val settings = Seq (
```
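With the hardcoded `private val scalaBinaryVersion = "2.12"` removed, the remaining references to `scalaBinaryVersion` resolve to sbt's built-in setting key of the same name, which is why the next hunk adds a `.value` call: the exported `SPARK_SCALA_VERSION` now follows whichever Scala binary version the build is actually using.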
```diff
@@ -988,7 +962,7 @@ object TestSettings {
       (fullClasspath in Test).value.files.map(_.getAbsolutePath)
         .mkString(File.pathSeparator).stripSuffix(File.pathSeparator),
       "SPARK_PREPEND_CLASSES" -> "1",
-      "SPARK_SCALA_VERSION" -> scalaBinaryVersion,
+      "SPARK_SCALA_VERSION" -> scalaBinaryVersion.value,
       "SPARK_TESTING" -> "1",
       "JAVA_HOME" -> sys.env.get("JAVA_HOME").getOrElse(sys.props("java.home"))),
     javaOptions in Test += s"-Djava.io.tmpdir=$testTempDir",
```
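The same pattern in isolation, as a hypothetical minimal `build.sbt` (not Spark's build, shown only to illustrate how `scalaBinaryVersion.value` tracks the active Scala version):

```scala
// Hypothetical build.sbt: export the env var from sbt's built-in
// scalaBinaryVersion setting instead of a hardcoded string.
scalaVersion := "2.13.3"

envVars in Test := (envVars in Test).value +
  ("SPARK_SCALA_VERSION" -> scalaBinaryVersion.value)

// With scalaVersion 2.13.3 this yields "2.13"; with a 2.12.x scalaVersion
// it yields "2.12", so bin/spark-class resolves the right directories.
```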