[SPARK-5096] Use sbt tasks instead of vals to get hadoop version
This makes it possible to compile Spark as an external `ProjectRef`, whereas now we throw a `FileNotFoundException`. Author: Michael Armbrust <michael@databricks.com> Closes #3905 from marmbrus/effectivePom and squashes the following commits: fd63aae [Michael Armbrust] Use sbt tasks instead of vals to get hadoop version.
This commit is contained in:
parent
c1f3c27f22
commit
6999910b0c
|
@ -114,17 +114,6 @@ object SparkBuild extends PomBuild {
|
||||||
|
|
||||||
override val userPropertiesMap = System.getProperties.toMap
|
override val userPropertiesMap = System.getProperties.toMap
|
||||||
|
|
||||||
// Handle case where hadoop.version is set via profile.
|
|
||||||
// Needed only because we read back this property in sbt
|
|
||||||
// when we create the assembly jar.
|
|
||||||
val pom = loadEffectivePom(new File("pom.xml"),
|
|
||||||
profiles = profiles,
|
|
||||||
userProps = userPropertiesMap)
|
|
||||||
if (System.getProperty("hadoop.version") == null) {
|
|
||||||
System.setProperty("hadoop.version",
|
|
||||||
pom.getProperties.get("hadoop.version").asInstanceOf[String])
|
|
||||||
}
|
|
||||||
|
|
||||||
lazy val MavenCompile = config("m2r") extend(Compile)
|
lazy val MavenCompile = config("m2r") extend(Compile)
|
||||||
lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
|
lazy val publishLocalBoth = TaskKey[Unit]("publish-local", "publish local for m2 and ivy")
|
||||||
|
|
||||||
|
@ -303,16 +292,15 @@ object Assembly {
|
||||||
import sbtassembly.Plugin._
|
import sbtassembly.Plugin._
|
||||||
import AssemblyKeys._
|
import AssemblyKeys._
|
||||||
|
|
||||||
|
val hadoopVersion = taskKey[String]("The version of hadoop that spark is compiled against.")
|
||||||
|
|
||||||
lazy val settings = assemblySettings ++ Seq(
|
lazy val settings = assemblySettings ++ Seq(
|
||||||
test in assembly := {},
|
test in assembly := {},
|
||||||
jarName in assembly <<= (version, moduleName) map { (v, mName) =>
|
hadoopVersion := {
|
||||||
if (mName.contains("network-yarn")) {
|
sys.props.get("hadoop.version")
|
||||||
// This must match the same name used in maven (see network/yarn/pom.xml)
|
.getOrElse(SbtPomKeys.effectivePom.value.getProperties.get("hadoop.version").asInstanceOf[String])
|
||||||
"spark-" + v + "-yarn-shuffle.jar"
|
|
||||||
} else {
|
|
||||||
mName + "-" + v + "-hadoop" + System.getProperty("hadoop.version") + ".jar"
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
|
jarName in assembly := s"${moduleName.value}-${version.value}-hadoop${hadoopVersion.value}.jar",
|
||||||
mergeStrategy in assembly := {
|
mergeStrategy in assembly := {
|
||||||
case PathList("org", "datanucleus", xs @ _*) => MergeStrategy.discard
|
case PathList("org", "datanucleus", xs @ _*) => MergeStrategy.discard
|
||||||
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
|
case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
|
||||||
|
@ -323,7 +311,6 @@ object Assembly {
|
||||||
case _ => MergeStrategy.first
|
case _ => MergeStrategy.first
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
object Unidoc {
|
object Unidoc {
|
||||||
|
|
Loading…
Reference in a new issue