Added SBT target for building a single JAR with Spark Core and its dependencies

parent a11fe23017
commit 50df43bf7b
@@ -1,7 +1,10 @@
 import sbt._
-import sbt.Process._
+
+import assembly._
 
 import de.element34.sbteclipsify._
 
+import sbt.Process._
+
 class SparkProject(info: ProjectInfo)
 extends ParentProject(info) with IdeaProject
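In the hunk above, the new `import assembly._` brings in the AssemblyBuilder trait from the assembly-sbt plugin that this commit declares further down; `import sbt.Process._` only moves, and is still needed because the test-report task below shells out to scala to run org.scalatest.tools.Runner (-o writes results to standard output, -u writes the XML report, -p supplies the runpath of test classes).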
@@ -12,31 +15,54 @@ extends ParentProject(info) with IdeaProject
   project("examples", "Spark Examples", new ExamplesProject(_), core)
 
   class CoreProject(info: ProjectInfo)
-  extends DefaultProject(info) with Eclipsify with IdeaProject
+  extends DefaultProject(info) with Eclipsify with IdeaProject with AssemblyBuilder
   {
-    val TARGET = path("target") / "scala_2.8.1"
-
-    val TEST_REPORT_DIR = TARGET / "test-report"
+    def testReportDir = outputPath / "test-report"
 
     // Create an XML test report using ScalaTest's -u option. Unfortunately
     // there is currently no way to call this directly from SBT without
     // executing a subprocess.
     lazy val testReport = task {
-      log.info("Creating " + TEST_REPORT_DIR + "...")
-      if (!TEST_REPORT_DIR.exists) {
-        TEST_REPORT_DIR.asFile.mkdirs()
+      log.info("Creating " + testReportDir + "...")
+      if (!testReportDir.exists) {
+        testReportDir.asFile.mkdirs()
       }
 
       log.info("Executing org.scalatest.tools.Runner...")
       val command = ("scala -classpath " + testClasspath.absString +
                      " org.scalatest.tools.Runner -o " +
-                     " -u " + TEST_REPORT_DIR.absolutePath +
-                     " -p " + (TARGET / "test-classes").absolutePath)
+                     " -u " + testReportDir.absolutePath +
+                     " -p " + (outputPath / "test-classes").absolutePath)
       val process = Process(command, path("."), "JAVA_OPTS" -> "-Xmx500m")
       process !
 
       None
     }.dependsOn(compile, testCompile).describedAs("Generate XML test report.")
+
+    def singleJarExclude(base: PathFinder) = {
+      (base / "scala" ** "*") +++ ( // exclude scala library
+      (base / "META-INF" ** "*") --- // generally ignore the hell out of META-INF
+      (base / "META-INF" / "services" ** "*") --- // include all service providers
+      (base / "META-INF" / "maven" ** "*")) // include all Maven POMs and such
+    }
+
+    def singleJarTempDir = outputPath / "single-jar-classes"
+
+    def singleJarOutputPath =
+      outputPath / (name.toLowerCase.replace(" ", "-") + "-single-jar-" + version.toString + ".jar")
+
+    // Create a JAR with Spark Core and all its dependencies. We use some methods in
+    // the AssemblyBuilder plugin, but because this plugin attempts to package Scala
+    // too, we leave that out using our own exclude filter (singleJarExclude).
+    lazy val singleJar = {
+      packageTask(
+        Path.lazyPathFinder(assemblyPaths(singleJarTempDir,
+                                          assemblyClasspath,
+                                          assemblyExtraJars,
+                                          singleJarExclude)),
+        singleJarOutputPath,
+        packageOptions)
+    }.dependsOn(compile).describedAs("Build a single JAR with project and its dependencies")
   }
 
   class ExamplesProject(info: ProjectInfo)
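The singleJarExclude filter above leans on SBT 0.7's PathFinder algebra: "**" selects recursively, +++ takes a union, and --- subtracts, so the expression reads as "everything under scala/, plus everything under META-INF/ except its services/ and maven/ subtrees". A minimal, self-contained sketch of that set logic in plain Scala over java.io.File follows; it illustrates the filter's semantics only and is not the PathFinder API (the object and method names are invented for the example):

import java.io.File

object SingleJarExcludeSketch {
  // Stand-in for `base ** "*"`: every file and directory under base, recursively.
  def listAll(base: File): Set[File] =
    Option(base.listFiles).toSeq.flatten.flatMap(f => listAll(f) + f).toSet

  // Mirrors singleJarExclude: drop the bundled Scala library and META-INF,
  // but keep META-INF/services (service providers) and META-INF/maven (POMs).
  def excluded(base: File): Set[File] = {
    val scalaLib  = listAll(new File(base, "scala"))
    val metaInf   = listAll(new File(base, "META-INF"))
    val services  = listAll(new File(base, "META-INF/services"))
    val mavenPoms = listAll(new File(base, "META-INF/maven"))
    scalaLib ++ (metaInf -- services -- mavenPoms) // +++ is union, --- is difference
  }

  def main(args: Array[String]): Unit =
    excluded(new File(args.headOption.getOrElse("."))).foreach(println)
}

If SBT 0.7's usual naming convention applies (camelCase task vals become hyphenated action names, as testReport / "Generate XML test report." suggests), the new task would be invoked as single-jar from the SBT shell inside the core project.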
@@ -5,4 +5,7 @@ class SparkProjectPlugins(info: ProjectInfo) extends PluginDefinition(info) {
 
   val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
   val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.2.0"
+
+  val codaRepo = "Coda Hale's Repository" at "http://repo.codahale.com/"
+  val assemblySBT = "com.codahale" % "assembly-sbt" % "0.1.1"
 }
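For context on the plugins hunk: in SBT 0.7.x a build declares plugins as ordinary dependencies of its PluginDefinition project. Each Resolver val (built with `at`) adds a repository, and each ModuleID val (built with the % operators, as for assembly-sbt 0.1.1 above) is fetched onto the build's classpath, which is what makes the AssemblyBuilder trait available to CoreProject in the earlier hunk.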
@@ -1,3 +1,3 @@
 #Project properties
-#Tue Feb 01 23:56:56 PST 2011
+#Wed Feb 02 16:50:14 PST 2011
 plugin.uptodate=true
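The properties hunk is mechanical: SBT rewrites the timestamp comment whenever it regenerates the file, and plugin.uptodate=true appears to record only that the declared plugins have already been resolved.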