SPARK-2899 Doc generation is back to working in new SBT Build.

The reason for this bug was the introduction of the OldDeps project. It had to be excluded to prevent unidoc from trying to put it on the "docs compile" classpath. (A minimal sketch of this exclusion pattern follows the commit metadata below.)

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #1830 from ScrapCodes/doc-fix and squashes the following commits:

e5d52e6 [Prashant Sharma] SPARK-2899 Doc generation is back to working in new SBT Build.
Authored by Prashant Sharma on 2014-08-07 16:24:22 -07:00; committed by Patrick Wendell
parent 80ec5bad13
commit 32096c2aed
2 changed files with 34 additions and 28 deletions
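Before the diff, a minimal sketch of the exclusion pattern the fix applies, assuming a hypothetical two-project sbt 0.13 build with sbt-unidoc 0.3.x (the names oldDeps and root are illustrative, not Spark's actual build): any project subtracted from inAnyProject is dropped from unidoc's source set, so its dependencies never reach the doc-generation classpath.

    import sbtunidoc.Plugin._
    import UnidocKeys._

    // Hypothetical helper project standing in for Spark's OldDeps.
    lazy val oldDeps = project.in(file("dev"))

    lazy val root = project.in(file("."))
      .settings(unidocSettings: _*)
      .settings(
        // Everything except oldDeps gets documented; oldDeps (and the old
        // artifacts it pulls) stays off the unidoc compile classpath.
        unidocProjectFilter in (ScalaUnidoc, unidoc) :=
          inAnyProject -- inProjects(oldDeps)
      )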

project/SparkBuild.scala

@@ -30,11 +30,11 @@ object BuildCommons {

   private val buildLocation = file(".").getAbsoluteFile.getParentFile

-  val allProjects@Seq(bagel, catalyst, core, graphx, hive, hiveThriftServer, mllib, repl, spark,
+  val allProjects@Seq(bagel, catalyst, core, graphx, hive, hiveThriftServer, mllib, repl,
     sql, streaming, streamingFlumeSink, streamingFlume, streamingKafka, streamingMqtt,
     streamingTwitter, streamingZeromq) =
     Seq("bagel", "catalyst", "core", "graphx", "hive", "hive-thriftserver", "mllib", "repl",
-      "spark", "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
+      "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
       "streaming-mqtt", "streaming-twitter", "streaming-zeromq").map(ProjectRef(buildLocation, _))

   val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests, sparkGangliaLgpl, sparkKinesisAsl) =
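A side note on the idiom in the hunk above: allProjects@Seq(...) is a Scala pattern binding, where @ names the whole matched value while the pattern also names each element. A toy example with hypothetical values:

    val xs @ Seq(a, b) = Seq(1, 2)
    // xs == Seq(1, 2), a == 1, b == 2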
@@ -44,8 +44,9 @@ object BuildCommons {

   val assemblyProjects@Seq(assembly, examples) = Seq("assembly", "examples")
     .map(ProjectRef(buildLocation, _))

-  val tools = "tools"
-
+  val tools = ProjectRef(buildLocation, "tools")
+  // Root project.
+  val spark = ProjectRef(buildLocation, "spark")
   val sparkHome = buildLocation
 }
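The hunk above also turns tools into a real ProjectRef and introduces spark as a reference to the root project. A ProjectRef pins a project by build location and project id, which lets both values be passed directly to the inProjects(...) filters later in the diff. The shape of the construction, reusing buildLocation from this file:

    val tools = ProjectRef(buildLocation, "tools")   // the "tools" subproject
    val spark = ProjectRef(buildLocation, "spark")   // the root project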
@@ -126,26 +127,6 @@ object SparkBuild extends PomBuild {
     publishLocalBoth <<= Seq(publishLocal in MavenCompile, publishLocal).dependOn
   )

-  /** Following project only exists to pull previous artifacts of Spark for generating
-    Mima ignores. For more information see: SPARK 2071 */
-  lazy val oldDeps = Project("oldDeps", file("dev"), settings = oldDepsSettings)
-
-  def versionArtifact(id: String): Option[sbt.ModuleID] = {
-    val fullId = id + "_2.10"
-    Some("org.apache.spark" % fullId % "1.0.0")
-  }
-
-  def oldDepsSettings() = Defaults.defaultSettings ++ Seq(
-    name := "old-deps",
-    scalaVersion := "2.10.4",
-    retrieveManaged := true,
-    retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
-    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
-      "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
-      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
-      "spark-core").map(versionArtifact(_).get intransitive())
-  )
-
   def enable(settings: Seq[Setting[_]])(projectRef: ProjectRef) = {
     val existingSettings = projectsMap.getOrElse(projectRef.project, Seq[Setting[_]]())
     projectsMap += (projectRef.project -> (existingSettings ++ settings))
@@ -184,7 +165,7 @@ object SparkBuild extends PomBuild {

     super.projectDefinitions(baseDirectory).map { x =>
       if (projectsMap.exists(_._1 == x.id)) x.settings(projectsMap(x.id): _*)
       else x.settings(Seq[Setting[_]](): _*)
-    } ++ Seq[Project](oldDeps)
+    } ++ Seq[Project](OldDeps.project)
   }
 }
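Note that OldDeps.project is still appended to sbt's project list here, so the oldDeps project continues to load and MiMa can keep pulling the old artifacts; only the Unidoc filters further down now skip it.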
@@ -193,6 +174,31 @@ object Flume {
   lazy val settings = sbtavro.SbtAvro.avroSettings
 }

+/**
+ * Following project only exists to pull previous artifacts of Spark for generating
+ * Mima ignores. For more information see: SPARK 2071
+ */
+object OldDeps {
+
+  lazy val project = Project("oldDeps", file("dev"), settings = oldDepsSettings)
+
+  def versionArtifact(id: String): Option[sbt.ModuleID] = {
+    val fullId = id + "_2.10"
+    Some("org.apache.spark" % fullId % "1.0.0")
+  }
+
+  def oldDepsSettings() = Defaults.defaultSettings ++ Seq(
+    name := "old-deps",
+    scalaVersion := "2.10.4",
+    retrieveManaged := true,
+    retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
+    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
+      "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
+      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
+      "spark-core").map(versionArtifact(_).get intransitive())
+  )
+}
+
 object Catalyst {
   lazy val settings = Seq(
     addCompilerPlugin("org.scalamacros" % "paradise" % "2.0.1" cross CrossVersion.full),
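For context on the object added above: versionArtifact builds the Maven coordinates of a previously released Spark artifact against Scala 2.10, and intransitive() stops sbt from resolving that artifact's own dependencies, since only the old jars themselves are needed to generate the MiMa ignores. As a standalone illustration, versionArtifact("spark-core") yields the equivalent of:

    val dep: sbt.ModuleID =
      ("org.apache.spark" % "spark-core_2.10" % "1.0.0").intransitive()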
@@ -285,9 +291,9 @@ object Unidoc {
     publish := {},

     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(repl, examples, tools, catalyst, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, yarn, yarnAlpha),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(repl, bagel, graphx, examples, tools, catalyst, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, yarn, yarnAlpha),

     // Skip class names containing $ and some internal packages in Javadocs
     unidocAllSources in (JavaUnidoc, unidoc) := {

project/plugins.sbt

@@ -23,6 +23,6 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.6")

 addSbtPlugin("com.alpinenow" % "junit_xml_listener" % "0.5.1")

-addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.0")
+addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.1")

 addSbtPlugin("com.cavorite" % "sbt-avro" % "0.3.2")