diff --git a/assembly/src/main/assembly/assembly.xml b/assembly/src/main/assembly/assembly.xml
index 47d3fa93d0..6af383db65 100644
--- a/assembly/src/main/assembly/assembly.xml
+++ b/assembly/src/main/assembly/assembly.xml
@@ -39,23 +39,20 @@
     <fileSet>
       <directory>
-        ${project.parent.basedir}/bin/
+        ${project.parent.basedir}/sbin/
       </directory>
-      <outputDirectory>/bin</outputDirectory>
+      <outputDirectory>/sbin</outputDirectory>
       <includes>
         <include>**/*</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
     <fileSet>
       <directory>
-        ${project.parent.basedir}
+        ${project.parent.basedir}/bin/
       </directory>
       <outputDirectory>/bin</outputDirectory>
       <includes>
-        <include>run-example*</include>
-        <include>spark-class*</include>
-        <include>spark-shell*</include>
-        <include>spark-executor*</include>
+        <include>**/*</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
diff --git a/pyspark b/bin/pyspark
similarity index 96%
rename from pyspark
rename to bin/pyspark
index 12cc926dda..d6810f4686 100755
--- a/pyspark
+++ b/bin/pyspark
@@ -18,7 +18,7 @@
 #

 # Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`; pwd)"
+FWDIR="$(cd `dirname $0`/..; pwd)"

 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
@@ -37,7 +37,7 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
 fi

 # Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
+if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
   . $FWDIR/conf/spark-env.sh
 fi
diff --git a/pyspark.cmd b/bin/pyspark.cmd
similarity index 100%
rename from pyspark.cmd
rename to bin/pyspark.cmd
diff --git a/pyspark2.cmd b/bin/pyspark2.cmd
similarity index 98%
rename from pyspark2.cmd
rename to bin/pyspark2.cmd
index 21f9a34388..95791095ec 100644
--- a/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -20,7 +20,7 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0
+set FWDIR=%~dp0..\

 rem Export this as SPARK_HOME
 set SPARK_HOME=%FWDIR%
diff --git a/run-example b/bin/run-example
similarity index 95%
rename from run-example
rename to bin/run-example
index a78192d31d..f2699c38a9 100755
--- a/run-example
+++ b/bin/run-example
@@ -25,13 +25,13 @@ esac
 SCALA_VERSION=2.10

 # Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`; pwd)"
+FWDIR="$(cd `dirname $0`/..; pwd)"

 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"

 # Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
+if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
   . $FWDIR/conf/spark-env.sh
 fi
@@ -61,7 +61,7 @@ fi

 # Since the examples JAR ideally shouldn't include spark-core (that dependency should be
 # "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
-CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
+CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
 CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"

 if $cygwin; then
diff --git a/run-example.cmd b/bin/run-example.cmd
similarity index 100%
rename from run-example.cmd
rename to bin/run-example.cmd
diff --git a/run-example2.cmd b/bin/run-example2.cmd
similarity index 97%
rename from run-example2.cmd
rename to bin/run-example2.cmd
index d4ad98d6e7..6861334cb0 100644
--- a/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -20,7 +20,7 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0
+set FWDIR=%~dp0..\

 rem Export this as SPARK_HOME
 set SPARK_HOME=%FWDIR%
@@ -49,7 +49,7 @@ if "x%SPARK_EXAMPLES_JAR%"=="x" (

 rem Compute Spark classpath using external script
 set DONT_PRINT_CLASSPATH=1
-call "%FWDIR%bin\compute-classpath.cmd"
+call "%FWDIR%sbin\compute-classpath.cmd"
 set DONT_PRINT_CLASSPATH=0

 set CLASSPATH=%SPARK_EXAMPLES_JAR%;%CLASSPATH%
diff --git a/spark-shell b/bin/spark-shell
similarity index 93%
rename from spark-shell
rename to bin/spark-shell
index d20af0fb39..bc7386db4d 100755
--- a/spark-shell
+++ b/bin/spark-shell
@@ -32,7 +32,7 @@ esac
 # Enter posix mode for bash
 set -o posix

-FWDIR="`dirname $0`"
+FWDIR="$(cd `dirname $0`/..; pwd)"

 for o in "$@"; do
   if [ "$1" = "-c" -o "$1" = "--cores" ]; then
@@ -90,10 +90,10 @@ if $cygwin; then
     # "Backspace sends ^H" setting in "Keys" section of the Mintty options
     # (see https://github.com/sbt/sbt/issues/562).
     stty -icanon min 1 -echo > /dev/null 2>&1
-    $FWDIR/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
+    $FWDIR/sbin/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
     stty icanon echo > /dev/null 2>&1
 else
-    $FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
+    $FWDIR/sbin/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
 fi

 # record the exit status lest it be overwritten:
diff --git a/spark-shell.cmd b/bin/spark-shell.cmd
similarity index 87%
rename from spark-shell.cmd
rename to bin/spark-shell.cmd
index 3e52bf835e..23973e3e3d 100644
--- a/spark-shell.cmd
+++ b/bin/spark-shell.cmd
@@ -17,6 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set FWDIR=%~dp0
+rem Find the path of sbin
+set SBIN=%~dp0..\sbin\

-cmd /V /E /C %FWDIR%spark-class2.cmd org.apache.spark.repl.Main %*
+cmd /V /E /C %SBIN%spark-class2.cmd org.apache.spark.repl.Main %*
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index fff9cb60c7..d4084820f6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -122,7 +122,7 @@ private[spark] class ExecutorRunner(
     // Figure out our classpath with the external compute-classpath script
     val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
     val classPath = Utils.executeAndGetOutput(
-      Seq(sparkHome + "/bin/compute-classpath" + ext),
+      Seq(sparkHome + "/sbin/compute-classpath" + ext),
       extraEnvironment=appDesc.command.environment)

     Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
index 08811520cf..0494ca8726 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
@@ -127,7 +127,7 @@ private[spark] class CoarseMesosSchedulerBackend(
       CoarseGrainedSchedulerBackend.ACTOR_NAME)
     val uri = conf.get("spark.executor.uri", null)
     if (uri == null) {
-      val runScript = new File(sparkHome, "spark-class").getCanonicalPath
+      val runScript = new File(sparkHome, "./sbin/spark-class").getCanonicalPath
       command.setValue(
         "\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d".format(
           runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
@@ -136,7 +136,7 @@ private[spark] class CoarseMesosSchedulerBackend(
       // glob the directory "correctly".
       val basename = uri.split('/').last.split('.').head
       command.setValue(
-        "cd %s*; ./spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d"
+        "cd %s*; ./sbin/spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d"
           .format(basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
       command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
     }
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
index bb278fb155..ae8d527352 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
@@ -102,12 +102,12 @@ private[spark] class MesosSchedulerBackend(
       .setEnvironment(environment)
     val uri = sc.conf.get("spark.executor.uri", null)
     if (uri == null) {
-      command.setValue(new File(sparkHome, "spark-executor").getCanonicalPath)
+      command.setValue(new File(sparkHome, "/sbin/spark-executor").getCanonicalPath)
     } else {
       // Grab everything to the first '.'. We'll use that and '*' to
       // glob the directory "correctly".
       val basename = uri.split('/').last.split('.').head
-      command.setValue("cd %s*; ./spark-executor".format(basename))
+      command.setValue("cd %s*; ./sbin/spark-executor".format(basename))
       command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
     }
     val memory = Resource.newBuilder()
diff --git a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index 58d47a201d..f207627e52 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@ -36,7 +36,7 @@ private[spark] object UIWorkloadGenerator {

   def main(args: Array[String]) {
     if (args.length < 2) {
-      println("usage: ./spark-class org.apache.spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
+      println("usage: ./sbin/spark-class org.apache.spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
       System.exit(1)
     }
diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index 6d1695eae7..c37fd9ab62 100644
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -35,7 +35,7 @@ class DriverSuite extends FunSuite with Timeouts {
     val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
     forAll(masters) { (master: String) =>
       failAfter(60 seconds) {
-        Utils.execute(Seq("./spark-class", "org.apache.spark.DriverWithoutCleanup", master),
+        Utils.execute(Seq("./sbin/spark-class", "org.apache.spark.DriverWithoutCleanup", master),
           new File(System.getenv("SPARK_HOME")))
       }
     }
diff --git a/kmeans_data.txt b/data/kmeans_data.txt
similarity index 100%
rename from kmeans_data.txt
rename to data/kmeans_data.txt
diff --git a/lr_data.txt b/data/lr_data.txt
similarity index 100%
rename from lr_data.txt
rename to data/lr_data.txt
diff --git a/pagerank_data.txt b/data/pagerank_data.txt
similarity index 100%
rename from pagerank_data.txt
rename to data/pagerank_data.txt
diff --git a/docs/running-on-yarn.md b/docs/running-on-yarn.md
index aa75ca4324..00cad99881 100644
--- a/docs/running-on-yarn.md
+++ b/docs/running-on-yarn.md
@@ -54,7 +54,7 @@ There are two scheduler mode that can be used to launch spark application on YARN.

 The command to launch the YARN Client is as follows:

-    SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./spark-class org.apache.spark.deploy.yarn.Client \
+    SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./sbin/spark-class org.apache.spark.deploy.yarn.Client \
       --jar <YOUR_APP_JAR_FILE> \
       --class <APP_MAIN_CLASS> \
       --args <APP_MAIN_ARGUMENTS> \
@@ -79,7 +79,7 @@ For example:

     # Submit Spark's ApplicationMaster to YARN's ResourceManager, and instruct Spark to run the SparkPi example
     $ SPARK_JAR=./assembly/target/scala-{{site.SCALA_VERSION}}/spark-assembly-{{site.SPARK_VERSION}}-hadoop2.0.5-alpha.jar \
-        ./spark-class org.apache.spark.deploy.yarn.Client \
+        ./sbin/spark-class org.apache.spark.deploy.yarn.Client \
           --jar examples/target/scala-{{site.SCALA_VERSION}}/spark-examples-assembly-{{site.SPARK_VERSION}}.jar \
           --class org.apache.spark.examples.SparkPi \
           --args yarn-standalone \
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index f7f0b78908..c2ce23081a 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -28,7 +28,7 @@ the master's web UI, which is [http://localhost:8080](http://localhost:8080) by

 Similarly, you can start one or more workers and connect them to the master via:

-    ./spark-class org.apache.spark.deploy.worker.Worker spark://IP:PORT
+    ./sbin/spark-class org.apache.spark.deploy.worker.Worker spark://IP:PORT

 Once you have started a worker, look at the master's web UI ([http://localhost:8080](http://localhost:8080) by default).
 You should see the new node listed there, along with its number of CPUs and memory (minus one gigabyte left for the OS).
@@ -70,12 +70,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create

 Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/bin`:

-- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on.
-- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
-- `bin/start-all.sh` - Starts both a master and a number of slaves as described above.
-- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
-- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
-- `bin/stop-all.sh` - Stops both the master and the slaves as described above.
+- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on.
+- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
+- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above.
+- `sbin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
+- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
+- `sbin/stop-all.sh` - Stops both the master and the slaves as described above.

 Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine.
diff --git a/make-distribution.sh b/make-distribution.sh
index 32bbdb90a5..82638a23a0 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -98,10 +98,7 @@ mkdir "$DISTDIR"/conf
 cp "$FWDIR"/conf/*.template "$DISTDIR"/conf
 cp -r "$FWDIR/bin" "$DISTDIR"
 cp -r "$FWDIR/python" "$DISTDIR"
-cp "$FWDIR/spark-class" "$DISTDIR"
-cp "$FWDIR/spark-shell" "$DISTDIR"
-cp "$FWDIR/spark-executor" "$DISTDIR"
-cp "$FWDIR/pyspark" "$DISTDIR"
+cp -r "$FWDIR/sbin" "$DISTDIR"

 if [ "$MAKE_TGZ" == "true" ]; then
diff --git a/python/pyspark/java_gateway.py b/python/pyspark/java_gateway.py
index d8ca9fce00..7243ee6861 100644
--- a/python/pyspark/java_gateway.py
+++ b/python/pyspark/java_gateway.py
@@ -31,7 +31,7 @@ def launch_gateway():
     # Launch the Py4j gateway using Spark's run command so that we pick up the
     # proper classpath and SPARK_MEM settings from spark-env.sh
     on_windows = platform.system() == "Windows"
-    script = "spark-class.cmd" if on_windows else "spark-class"
+    script = "./sbin/spark-class.cmd" if on_windows else "./sbin/spark-class"
     command = [os.path.join(SPARK_HOME, script), "py4j.GatewayServer",
                "--die-on-broken-pipe", "0"]
     if not on_windows:
diff --git a/python/run-tests b/python/run-tests
index 4b71fff7c1..feba97cee0 100755
--- a/python/run-tests
+++ b/python/run-tests
@@ -29,7 +29,7 @@ FAILED=0
 rm -f unit-tests.log

 function run_test() {
-    SPARK_TESTING=0 $FWDIR/pyspark $1 2>&1 | tee -a unit-tests.log
+    SPARK_TESTING=0 $FWDIR/bin/pyspark $1 2>&1 | tee -a unit-tests.log
     FAILED=$((PIPESTATUS[0]||$FAILED))
 }
diff --git a/repl-bin/src/deb/bin/run b/repl-bin/src/deb/bin/run
index 47bb654baf..3a6f22f41f 100755
--- a/repl-bin/src/deb/bin/run
+++ b/repl-bin/src/deb/bin/run
@@ -48,8 +48,7 @@ fi
 export JAVA_OPTS

 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH"
-CLASSPATH+=":$FWDIR/conf"
+CLASSPATH=":$FWDIR/conf"
 for jar in `find $FWDIR -name '*jar'`; do
   CLASSPATH+=":$jar"
 done
diff --git a/repl/pom.xml b/repl/pom.xml
index b0e7877bbb..2dfe7ac900 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -127,7 +127,6 @@
             <environmentVariables>
               <SPARK_HOME>${basedir}/..</SPARK_HOME>
               <SPARK_TESTING>1</SPARK_TESTING>
-              <SPARK_CLASSPATH>${spark.classpath}</SPARK_CLASSPATH>
             </environmentVariables>
diff --git a/bin/compute-classpath.cmd b/sbin/compute-classpath.cmd
similarity index 98%
rename from bin/compute-classpath.cmd
rename to sbin/compute-classpath.cmd
index 9e3e10ecaa..4f60bff19c 100644
--- a/bin/compute-classpath.cmd
+++ b/sbin/compute-classpath.cmd
@@ -29,7 +29,7 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
 if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"

 rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%FWDIR%conf
+set CLASSPATH=%FWDIR%conf

 if exist "%FWDIR%RELEASE" (
   for %%d in ("%FWDIR%jars\spark-assembly*.jar") do (
     set ASSEMBLY_JAR=%%d
diff --git a/bin/compute-classpath.sh b/sbin/compute-classpath.sh
similarity index 98%
rename from bin/compute-classpath.sh
rename to sbin/compute-classpath.sh
index 40555089fc..0c82310421 100755
--- a/bin/compute-classpath.sh
+++ b/sbin/compute-classpath.sh
@@ -26,7 +26,7 @@ SCALA_VERSION=2.10
 FWDIR="$(cd `dirname $0`/..; pwd)"

 # Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
+if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
   . $FWDIR/conf/spark-env.sh
 fi
diff --git a/bin/slaves.sh b/sbin/slaves.sh
similarity index 97%
rename from bin/slaves.sh
rename to sbin/slaves.sh
index c367c2fd8e..a5bc2183d8 100755
--- a/bin/slaves.sh
+++ b/sbin/slaves.sh
@@ -36,10 +36,10 @@ if [ $# -le 0 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in
diff --git a/spark-class b/sbin/spark-class
similarity index 96%
rename from spark-class
rename to sbin/spark-class
index 1858ea6247..4e440d8729 100755
--- a/spark-class
+++ b/sbin/spark-class
@@ -25,13 +25,13 @@ esac
 SCALA_VERSION=2.10

 # Figure out where the Scala framework is installed
-FWDIR="$(cd `dirname $0`; pwd)"
+FWDIR="$(cd `dirname $0`/..; pwd)"

 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"

 # Load environment variables from conf/spark-env.sh, if it exists
-if [ -e $FWDIR/conf/spark-env.sh ] ; then
+if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
   . $FWDIR/conf/spark-env.sh
 fi
@@ -92,7 +92,7 @@ JAVA_OPTS="$OUR_JAVA_OPTS"
 JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
 JAVA_OPTS="$JAVA_OPTS -Xms$SPARK_MEM -Xmx$SPARK_MEM"
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
-if [ -e $FWDIR/conf/java-opts ] ; then
+if [ -e "$FWDIR/conf/java-opts" ] ; then
   JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
 fi
 export JAVA_OPTS
@@ -128,7 +128,7 @@ if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
 fi

 # Compute classpath using external script
-CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
+CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`

 if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
diff --git a/spark-class.cmd b/sbin/spark-class.cmd
similarity index 100%
rename from spark-class.cmd
rename to sbin/spark-class.cmd
diff --git a/spark-class2.cmd b/sbin/spark-class2.cmd
similarity index 98%
rename from spark-class2.cmd
rename to sbin/spark-class2.cmd
index dc9dadf356..460e661476 100644
--- a/spark-class2.cmd
+++ b/sbin/spark-class2.cmd
@@ -20,7 +20,7 @@ rem
 set SCALA_VERSION=2.10

 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0
+set FWDIR=%~dp0..\

 rem Export this as SPARK_HOME
 set SPARK_HOME=%FWDIR%
@@ -73,7 +73,7 @@ for %%d in ("%TOOLS_DIR%\target\scala-%SCALA_VERSION%\spark-tools*assembly*.jar"

 rem Compute classpath using external script
 set DONT_PRINT_CLASSPATH=1
-call "%FWDIR%bin\compute-classpath.cmd"
+call "%FWDIR%sbin\compute-classpath.cmd"
 set DONT_PRINT_CLASSPATH=0

 set CLASSPATH=%CLASSPATH%;%SPARK_TOOLS_JAR%
diff --git a/bin/spark-config.sh b/sbin/spark-config.sh
similarity index 100%
rename from bin/spark-config.sh
rename to sbin/spark-config.sh
diff --git a/bin/spark-daemon.sh b/sbin/spark-daemon.sh
similarity index 96%
rename from bin/spark-daemon.sh
rename to sbin/spark-daemon.sh
index a0c0d44b58..ca6b893b9b 100755
--- a/bin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -37,10 +37,10 @@ if [ $# -le 1 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # get arguments
@@ -147,7 +147,7 @@ case $startStop in
     spark_rotate_log "$log"
     echo starting $command, logging to $log
     cd "$SPARK_PREFIX"
-    nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+    nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/sbin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
     newpid=$!
     echo $newpid > $pid
     sleep 2
diff --git a/bin/spark-daemons.sh b/sbin/spark-daemons.sh
similarity index 88%
rename from bin/spark-daemons.sh
rename to sbin/spark-daemons.sh
index 64286cb2da..5d9f2bb51c 100755
--- a/bin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@ -27,9 +27,9 @@ if [ $# -le 1 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

-exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@"
+exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
diff --git a/spark-executor b/sbin/spark-executor
similarity index 88%
rename from spark-executor
rename to sbin/spark-executor
index 2c07c54843..214e00f6f8 100755
--- a/spark-executor
+++ b/sbin/spark-executor
@@ -17,6 +17,7 @@
 # limitations under the License.
 #

-FWDIR="`dirname $0`"
+FWDIR="$(cd `dirname $0`/..; pwd)"
+
 echo "Running spark-executor with framework dir = $FWDIR"
-exec $FWDIR/spark-class org.apache.spark.executor.MesosExecutorBackend
+exec $FWDIR/sbin/spark-class org.apache.spark.executor.MesosExecutorBackend
diff --git a/bin/start-all.sh b/sbin/start-all.sh
similarity index 89%
rename from bin/start-all.sh
rename to sbin/start-all.sh
index 0182f1ab24..2daf49db35 100755
--- a/bin/start-all.sh
+++ b/sbin/start-all.sh
@@ -21,14 +21,14 @@
 # Starts the master on this node.
 # Starts a worker on each node specified in conf/slaves

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

 # Load the Spark configuration
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # Start Master
-"$bin"/start-master.sh
+"$sbin"/start-master.sh

 # Start Workers
-"$bin"/start-slaves.sh
+"$sbin"/start-slaves.sh
diff --git a/bin/start-master.sh b/sbin/start-master.sh
similarity index 88%
rename from bin/start-master.sh
rename to sbin/start-master.sh
index 648c7ae75f..3dcf7cc348 100755
--- a/bin/start-master.sh
+++ b/sbin/start-master.sh
@@ -19,10 +19,10 @@

 # Starts the master on the machine this script is executed on.

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
   . "${SPARK_CONF_DIR}/spark-env.sh"
@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
     fi
 fi

-"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
+"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/bin/start-slave.sh b/sbin/start-slave.sh
similarity index 92%
rename from bin/start-slave.sh
rename to sbin/start-slave.sh
index 4eefa20944..524be38c62 100755
--- a/bin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -20,8 +20,8 @@
 # Usage: start-slave.sh <worker#> <master-spark-URL>
 #   where <master-spark-URL> is like "spark://localhost:7077"

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

 # Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
 if [ "$SPARK_PUBLIC_DNS" = "" ]; then
@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
     fi
 fi

-"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
+"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
diff --git a/bin/start-slaves.sh b/sbin/start-slaves.sh
similarity index 78%
rename from bin/start-slaves.sh
rename to sbin/start-slaves.sh
index 00dc4888b2..fd5cdeb1e6 100755
--- a/bin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -17,10 +17,10 @@
 # limitations under the License.
 #

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
   . "${SPARK_CONF_DIR}/spark-env.sh"
"${SPARK_CONF_DIR}/spark-env.sh" @@ -37,12 +37,12 @@ fi # Launch the slaves if [ "$SPARK_WORKER_INSTANCES" = "" ]; then - exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT + exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT else if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then SPARK_WORKER_WEBUI_PORT=8081 fi for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do - "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i )) + "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i )) done fi diff --git a/bin/stop-all.sh b/sbin/stop-all.sh similarity index 89% rename from bin/stop-all.sh rename to sbin/stop-all.sh index b6c83a7ba4..60b358d374 100755 --- a/bin/stop-all.sh +++ b/sbin/stop-all.sh @@ -21,12 +21,12 @@ # Run this on the master nde -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` # Load the Spark configuration -. "$bin/spark-config.sh" +. "$sbin/spark-config.sh" # Stop the slaves, then the master -"$bin"/stop-slaves.sh -"$bin"/stop-master.sh +"$sbin"/stop-slaves.sh +"$sbin"/stop-master.sh diff --git a/bin/stop-master.sh b/sbin/stop-master.sh similarity index 86% rename from bin/stop-master.sh rename to sbin/stop-master.sh index 310e33bedc..2adabd4265 100755 --- a/bin/stop-master.sh +++ b/sbin/stop-master.sh @@ -19,9 +19,9 @@ # Starts the master on the machine this script is executed on. -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` -. "$bin/spark-config.sh" +. "$sbin/spark-config.sh" -"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1 +"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1 diff --git a/bin/stop-slaves.sh b/sbin/stop-slaves.sh similarity index 83% rename from bin/stop-slaves.sh rename to sbin/stop-slaves.sh index fcb8555d4e..c6b0b6ab66 100755 --- a/bin/stop-slaves.sh +++ b/sbin/stop-slaves.sh @@ -18,18 +18,18 @@ # bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +bin=`cd "$sbin"; pwd` -. "$bin/spark-config.sh" +. "$sbin/spark-config.sh" if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then . "${SPARK_CONF_DIR}/spark-env.sh" fi if [ "$SPARK_WORKER_INSTANCES" = "" ]; then - "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1 + "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1 else for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do - "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 )) + "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 )) done fi