Adding ability to custom deploy only Spark

Patrick Wendell 2013-07-11 15:44:31 -07:00
parent 311e63f2c7
commit f1689185d6


@@ -495,8 +495,15 @@ def deploy_files(conn, root_dir, opts, master_nodes, slave_nodes, modules):
    cluster_url = "%s:7077" % active_master
    if "." in opts.spark_version:
        # Pre-built deploy
        (spark_v, shark_v) = get_spark_shark_version(opts)
    elif opts.shark_version is None:
        # Spark-only custom deploy
        spark_v = "%s|%s" % (opts.spark_git_repo, opts.spark_version)
        shark_v = ""
        modules = filter(lambda x: x != "shark", modules)
    else:
        # Spark and Shark custom deploy
        spark_v = "%s|%s" % (opts.spark_git_repo, opts.spark_version)
        shark_v = "%s|%s" % (opts.shark_git_repo, opts.shark_version)
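For context, the three deploy paths above can be exercised in isolation. The following is a minimal sketch, not part of spark_ec2.py: FakeOpts and resolve_versions are hypothetical stand-ins for the script's parsed options object and inline logic, and the pre-built branch simply echoes the release string where the real script calls get_spark_shark_version.

    # Minimal sketch of the branch logic above; FakeOpts and
    # resolve_versions are illustrative stand-ins, not spark_ec2.py APIs.
    class FakeOpts(object):
        def __init__(self, spark_version, spark_git_repo,
                     shark_version=None, shark_git_repo=None):
            self.spark_version = spark_version
            self.spark_git_repo = spark_git_repo
            self.shark_version = shark_version
            self.shark_git_repo = shark_git_repo

    def resolve_versions(opts, modules):
        if "." in opts.spark_version:
            # Pre-built deploy: a release string like "0.7.3" contains a
            # dot; the real script maps it to packaged Spark/Shark versions.
            return (opts.spark_version, opts.spark_version, modules)
        elif opts.shark_version is None:
            # Spark-only custom deploy: encode the build as "repo|git_ref"
            # and drop the shark module from the deploy list.
            spark_v = "%s|%s" % (opts.spark_git_repo, opts.spark_version)
            return (spark_v, "", [m for m in modules if m != "shark"])
        else:
            # Spark and Shark custom deploy: both get "repo|git_ref".
            spark_v = "%s|%s" % (opts.spark_git_repo, opts.spark_version)
            shark_v = "%s|%s" % (opts.shark_git_repo, opts.shark_version)
            return (spark_v, shark_v, modules)

    # A git SHA contains no ".", so it selects the Spark-only custom path:
    opts = FakeOpts("f1689185", "https://github.com/mesos/spark")
    print(resolve_versions(opts, ["spark", "shark", "ganglia"]))
    # -> ('https://github.com/mesos/spark|f1689185', '', ['spark', 'ganglia'])

Note the design choice this illustrates: whether a deploy is pre-built or custom is inferred from the shape of --spark-version (release numbers contain a dot, git refs do not), and a custom deploy without a --shark-version now skips Shark entirely rather than failing.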