628932b8d0
Patch introduces the new way of working also retaining the existing ways of doing things.
For example build instruction for yarn in maven is
`mvn -Pyarn -Phadoop-2.2 clean package -DskipTests`
in sbt it can become
`MAVEN_PROFILES="yarn, hadoop-2.2" sbt/sbt clean assembly`
Also supports
`sbt/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 clean assembly`
Author: Prashant Sharma <prashant.s@imaginea.com>
Author: Patrick Wendell <pwendell@gmail.com>
Closes #772 from ScrapCodes/sbt-maven and squashes the following commits:
a8ac951 [Prashant Sharma] Updated sbt version.
62b09bb [Prashant Sharma] Improvements.
fa6221d [Prashant Sharma] Excluding sql from mima
4b8875e [Prashant Sharma] Sbt assembly no longer builds tools by default.
72651ca [Prashant Sharma] Addresses code review comments.
acab73d [Prashant Sharma] Revert "Small fix to run-examples script."
ac4312c [Prashant Sharma] Revert "minor fix"
6af91ac [Prashant Sharma] Ported oldDeps back. + fixes issues with prev commit.
65cf06c [Prashant Sharma] Servlet API jars mess up with the other servlet jars on the class path.
446768e [Prashant Sharma] minor fix
89b9777 [Prashant Sharma] Merge conflicts
d0a02f2 [Prashant Sharma] Bumped up pom versions, Since the build now depends on pom it is better updated there. + general cleanups.
dccc8ac [Prashant Sharma] updated mima to check against 1.0
a49c61b [Prashant Sharma] Fix for tools jar
a2f5ae1 [Prashant Sharma] Fixes a bug in dependencies.
cf88758 [Prashant Sharma] cleanup
9439ea3 [Prashant Sharma] Small fix to run-examples script.
96cea1f [Prashant Sharma] SPARK-1776 Have Spark's SBT build read dependencies from Maven.
36efa62
[Patrick Wendell] Set project name in pom files and added eclipse/intellij plugins.
4973dbd [Patrick Wendell] Example build using pom reader.
96 lines
3.6 KiB
Scala
96 lines
3.6 KiB
Scala
/*
|
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
|
* contributor license agreements. See the NOTICE file distributed with
|
|
* this work for additional information regarding copyright ownership.
|
|
* The ASF licenses this file to You under the Apache License, Version 2.0
|
|
* (the "License"); you may not use this file except in compliance with
|
|
* the License. You may obtain a copy of the License at
|
|
*
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
*
|
|
* Unless required by applicable law or agreed to in writing, software
|
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
* See the License for the specific language governing permissions and
|
|
* limitations under the License.
|
|
*/
|
|
|
|
import sbt._
|
|
import sbt.Keys.version
|
|
|
|
import com.typesafe.tools.mima.core._
|
|
import com.typesafe.tools.mima.core.MissingClassProblem
|
|
import com.typesafe.tools.mima.core.MissingTypesProblem
|
|
import com.typesafe.tools.mima.core.ProblemFilters._
|
|
import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters, previousArtifact}
|
|
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
|
|
|
|
|
|
object MimaBuild {

  /**
   * Filters that exclude a single member (method or field) from MiMa's binary
   * compatibility checking, covering every member-level problem kind MiMa can
   * report for `fullName`.
   */
  def excludeMember(fullName: String) = Seq(
    ProblemFilters.exclude[MissingMethodProblem](fullName),
    ProblemFilters.exclude[MissingFieldProblem](fullName),
    ProblemFilters.exclude[IncompatibleResultTypeProblem](fullName),
    ProblemFilters.exclude[IncompatibleMethTypeProblem](fullName),
    ProblemFilters.exclude[IncompatibleFieldTypeProblem](fullName)
  )

  // Exclude a single class and its corresponding object (the "$"-suffixed
  // companion in bytecode), both as a class and as a package of members.
  def excludeClass(className: String) = Seq(
    excludePackage(className),
    ProblemFilters.exclude[MissingClassProblem](className),
    ProblemFilters.exclude[MissingTypesProblem](className),
    excludePackage(className + "$"),
    ProblemFilters.exclude[MissingClassProblem](className + "$"),
    ProblemFilters.exclude[MissingTypesProblem](className + "$")
  )

  // Exclude a Spark class, that is in the package org.apache.spark
  def excludeSparkClass(className: String) = {
    excludeClass("org.apache.spark." + className)
  }

  // Exclude a Spark package, that is in the package org.apache.spark
  def excludeSparkPackage(packageName: String) = {
    excludePackage("org.apache.spark." + packageName)
  }

  // Reads newline-separated exclude entries from `excludeFile`, returning an
  // empty Seq when the file does not exist. Blank entries (e.g. a trailing
  // newline in the generated file) are dropped so they cannot turn into
  // filters on the empty name.
  private def readExcludes(excludeFile: File): Seq[String] = {
    if (!excludeFile.exists()) {
      Seq()
    } else {
      IO.read(excludeFile).split("\n").filterNot(_.trim.isEmpty)
    }
  }

  /**
   * Builds the full list of ABI problem filters for this build: the static
   * defaults, the generated per-build class/member excludes under `base`, and
   * the version-specific excludes from [[MimaExcludes]].
   */
  def ignoredABIProblems(base: File, currentSparkVersion: String) = {

    // Excludes placed here will be used for all Spark versions
    val defaultExcludes = Seq()

    // Read package-private excludes from the generated files, if present.
    val ignoredClasses =
      readExcludes(file(base.getAbsolutePath + "/.generated-mima-class-excludes"))
    val ignoredMembers =
      readExcludes(file(base.getAbsolutePath + "/.generated-mima-member-excludes"))

    defaultExcludes ++ ignoredClasses.flatMap(excludeClass) ++
    ignoredMembers.flatMap(excludeMember) ++ MimaExcludes.excludes(currentSparkVersion)
  }

  /**
   * MiMa settings for one sub-project: check the current artifact against the
   * released `previousSparkVersion` artifact, applying the ignored-problem
   * filters computed from `sparkHome`.
   */
  def mimaSettings(sparkHome: File, projectRef: ProjectRef) = {
    val organization = "org.apache.spark"
    val previousSparkVersion = "1.0.0"
    // Artifact ids follow the "spark-<project>_<scalaBinaryVersion>" convention.
    val fullId = "spark-" + projectRef.project + "_2.10"
    mimaDefaultSettings ++
      Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),
        binaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
  }

}
|