[SPARK-15290][BUILD] Move annotations, like @Since / @DeveloperApi, into spark-tags

## What changes were proposed in this pull request?

(See https://github.com/apache/spark/pull/12416, where most of this was already reviewed and committed; this is just the module-structure and move part. This change does not move the annotations into test scope, which was apparently the problem last time.)

Rename `spark-test-tags` -> `spark-tags`; move common annotations like `Since` to `spark-tags`
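
For illustration only (not part of this diff): with the annotations published in `spark-tags`, a Spark module that depends on `spark-tags_${scala.binary.version}` at compile scope can annotate its API as in this minimal sketch, where `MyDeveloperFeature` and its package are hypothetical names:

```scala
package org.apache.spark.example  // hypothetical package, for illustration

// Minimal sketch, assuming spark-tags is on the compile classpath: the
// annotation classes in org.apache.spark.annotation now ship in the
// spark-tags artifact rather than the test-only spark-test-tags artifact.
import org.apache.spark.annotation.{DeveloperApi, Since}

@DeveloperApi
@Since("2.0.0")
class MyDeveloperFeature {  // hypothetical class
  @Since("2.0.0")
  def run(): Unit = ()
}
```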

## How was this patch tested?

Jenkins tests.

Author: Sean Owen <sowen@cloudera.com>

Closes #13074 from srowen/SPARK-15290.
Date: 2016-05-17 09:55:53 +01:00
parent 8ad9f08c94
commit 122302cbf5
35 changed files with 59 additions and 41 deletions

@@ -66,7 +66,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>

@@ -80,7 +80,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>log4j</groupId>

@@ -48,7 +48,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <!-- Provided dependencies -->

@@ -38,7 +38,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>

@@ -27,12 +27,12 @@
   </parent>
   <groupId>org.apache.spark</groupId>
-  <artifactId>spark-test-tags_2.11</artifactId>
+  <artifactId>spark-tags_2.11</artifactId>
   <packaging>jar</packaging>
-  <name>Spark Project Test Tags</name>
+  <name>Spark Project Tags</name>
   <url>http://spark.apache.org/</url>
   <properties>
-    <sbt.project.name>test-tags</sbt.project.name>
+    <sbt.project.name>tags</sbt.project.name>
   </properties>
   <dependencies>

@@ -36,6 +36,10 @@
   </properties>
   <dependencies>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+    </dependency>
     <dependency>
       <groupId>com.twitter</groupId>
       <artifactId>chill_${scala.binary.version}</artifactId>
@@ -59,10 +63,6 @@
     </dependency>
     <!-- Test dependencies -->
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>

@@ -331,7 +331,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>

@@ -92,10 +92,17 @@ class Module(object):
     def __hash__(self):
         return hash(self.name)
+tags = Module(
+    name="tags",
+    dependencies=[],
+    source_file_regexes=[
+        "common/tags/",
+    ]
+)
 catalyst = Module(
     name="catalyst",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "sql/catalyst/",
     ],
@@ -165,7 +172,7 @@ hivecontext_compatibility = Module(
 sketch = Module(
     name="sketch",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "common/sketch/",
     ],
@@ -177,7 +184,7 @@ sketch = Module(
 graphx = Module(
     name="graphx",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "graphx/",
     ],
@@ -189,7 +196,7 @@ graphx = Module(
 streaming = Module(
     name="streaming",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "streaming",
     ],
@@ -205,7 +212,7 @@ streaming = Module(
 # fail other PRs.
 streaming_kinesis_asl = Module(
     name="streaming-kinesis-asl",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "external/kinesis-asl/",
         "external/kinesis-asl-assembly/",
@@ -270,7 +277,7 @@ streaming_flume_assembly = Module(
 mllib_local = Module(
     name="mllib-local",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "mllib-local",
     ],

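As a hedged aside on the modules.py change above (conceptual, not Spark's actual code): each `Module` lists its `dependencies`, and the test infrastructure retests every module that transitively depends on a changed one, so giving `catalyst`, `sketch`, `graphx`, and the rest a dependency on `tags` means a change under `common/tags/` retests them all. The closure computation amounts to this, sketched in Scala with hypothetical names:

```scala
// Conceptual sketch only: a change to `tags` should retest every module
// whose dependency chain leads (transitively) back to it.
final case class Module(name: String, dependencies: Seq[Module])

def affected(changed: Module, all: Seq[Module]): Set[String] = {
  def dependsOnChanged(m: Module): Boolean =
    m.dependencies.exists(d => d == changed || dependsOnChanged(d))
  Set(changed.name) ++ all.filter(dependsOnChanged).map(_.name).toSet
}

val tags = Module("tags", Seq.empty)
val catalyst = Module("catalyst", Seq(tags))
println(affected(tags, Seq(tags, catalyst)))  // Set(tags, catalyst)
```
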
@@ -126,7 +126,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>

@@ -92,7 +92,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>

@@ -68,7 +68,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>

@@ -72,7 +72,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>

@@ -88,7 +88,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>

@@ -77,7 +77,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>

@@ -72,7 +72,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>

@@ -65,7 +65,7 @@
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <!-- Not needed by the test code, but referenced by SparkSubmit which is used by the tests. -->

@@ -53,6 +53,10 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>

@@ -116,7 +116,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <profiles>

@@ -286,9 +286,8 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.twitter</groupId>

@@ -716,6 +716,14 @@ object MimaExcludes {
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.executor.ShuffleReadMetrics.localBlocksFetched"),
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.remoteBlocksFetched"),
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.localBlocksFetched")
+      ) ++ Seq(
+        // [SPARK-15290] Move annotations, like @Since / @DeveloperApi, into spark-tags
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package$"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.Private"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.AlphaComponent"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.Experimental"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.DeveloperApi")
       )
     case v if v.startsWith("1.6") =>
       Seq(

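For context on the MimaExcludes entries above (a hedged, simplified sketch, not MiMa's actual types): moving the annotation classes out of core's jar makes MiMa report each one as a `MissingClassProblem` against the previous release, and an exclude is just a predicate that suppresses reports matching that class name. Conceptually:

```scala
// Simplified stand-ins for MiMa's problem model, for illustration only.
sealed trait Problem { def matchName: Option[String] }
final case class MissingClassProblem(className: String) extends Problem {
  def matchName: Option[String] = Some(className)
}

// An exclude keeps every problem except those naming the moved class, so the
// relocation of org.apache.spark.annotation.* into spark-tags passes the check.
def excludeByName(name: String): Problem => Boolean =
  p => !p.matchName.contains(name)

val filters: Seq[Problem => Boolean] = Seq(
  excludeByName("org.apache.spark.annotation.DeveloperApi"),
  excludeByName("org.apache.spark.annotation.Experimental")
)

def remaining(problems: Seq[Problem]): Seq[Problem] =
  problems.filter(p => filters.forall(f => f(p)))
```
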
@@ -50,10 +50,10 @@ object BuildCommons {
   ).map(ProjectRef(buildLocation, _))
   val allProjects@Seq(
-    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, testTags, sketch, _*
+    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, _*
   ) = Seq(
     "core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
-    "test-tags", "sketch"
+    "tags", "sketch"
   ).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects
   val optionallyEnabledProjects@Seq(yarn, java8Tests, sparkGangliaLgpl,
@@ -340,7 +340,7 @@ object SparkBuild extends PomBuild {
   val mimaProjects = allProjects.filterNot { x =>
     Seq(
       spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
-      unsafe, testTags, sketch, mllibLocal
+      unsafe, tags, sketch, mllibLocal
     ).contains(x)
   }
@@ -685,9 +685,9 @@ object Unidoc {
     publish := {},
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
     // Skip actual catalyst, but include the subproject.
     // Catalyst is not public API and contains quasiquotes which break scaladoc.

@@ -87,7 +87,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.xbean</groupId>

@@ -55,7 +55,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>

@@ -73,7 +73,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.parquet</groupId>

@@ -84,7 +84,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>net.sf.jpam</groupId>

@@ -60,7 +60,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <!--
     <dependency>

@@ -49,7 +49,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <!-- Explicit listing of transitive deps that are shaded. Otherwise, odd compiler crashes. -->

@@ -54,7 +54,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>