[BUILD] Runner for spark packages
This is a convenience method added to the SBT build for developers, though if people think it's useful we could consider adding an official script that runs using the assembly instead of compiling on demand. It simply compiles Spark (without requiring an assembly), and invokes Spark Submit to download / run the package. Example Usage: ``` $ build/sbt > sparkPackage com.databricks:spark-sql-perf_2.10:0.2.4 com.databricks.spark.sql.perf.RunBenchmark --help ``` Author: Michael Armbrust <michael@databricks.com> Closes #10834 from marmbrus/sparkPackageRunner.
This commit is contained in:
parent
c6f971b4ae
commit
efd7eed322
|
@@ -274,6 +274,11 @@ object SparkBuild extends PomBuild {
   * Usage: `build/sbt sparkShell`
   */
  val sparkShell = taskKey[Unit]("start a spark-shell.")

  val sparkPackage = inputKey[Unit](
    s"""
      |Download and run a spark package.
      |Usage `builds/sbt "sparkPackage <group:artifact:version> <MainClass> [args]
    """.stripMargin)

  val sparkSql = taskKey[Unit]("starts the spark sql CLI.")

  enable(Seq(
@@ -287,6 +292,16 @@ object SparkBuild extends PomBuild {
      (runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
    },

    sparkPackage := {
      import complete.DefaultParsers._
      val packages :: className :: otherArgs = spaceDelimited("<group:artifact:version> <MainClass> [args]").parsed.toList
      val scalaRun = (runner in run).value
      val classpath = (fullClasspath in Runtime).value
      val args = Seq("--packages", packages, "--class", className, (Keys.`package` in Compile in "core").value.getCanonicalPath) ++ otherArgs
      println(args)
      scalaRun.run("org.apache.spark.deploy.SparkSubmit", classpath.map(_.data), args, streams.value.log)
    },

    javaOptions in Compile += "-Dspark.master=local",

    sparkSql := {
|
Loading…
Reference in a new issue