#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e

# Figure out where Spark is installed: the parent of this script's directory.
# $(...) replaces the original nesting-unsafe backticks.
export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"

# Load Spark environment settings (e.g. SPARK_SCALA_VERSION used below).
. "$SPARK_HOME"/bin/load-spark-env.sh
# A class name to run is mandatory; bail out with usage otherwise.
case "$1" in
  "")
    echo "Usage: spark-class <class> [<args>]" 1>&2
    exit 1
    ;;
esac
# Find the java binary: prefer $JAVA_HOME, fall back to java on the PATH.
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
else
  # Test command -v's exit status directly instead of wrapping its (unquoted)
  # output in `[ ]`, which breaks if the resolved path contains spaces.
  if command -v java >/dev/null 2>&1; then
    RUNNER="java"
  else
    echo "JAVA_HOME is not set" >&2
    exit 1
  fi
fi
# Find assembly jar.
# A binary distribution (marked by a RELEASE file) ships it under lib/;
# a source build produces it under assembly/target/scala-<version>/.
SPARK_ASSEMBLY_JAR=
if [ -f "$SPARK_HOME/RELEASE" ]; then
  ASSEMBLY_DIR="$SPARK_HOME/lib"
else
  ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
fi

# Count candidate jars. `grep -c` exits non-zero when the count is 0, so guard
# with `|| true` to keep `set -e` from aborting; the "0" is still captured.
num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep -c "^spark-assembly.*hadoop.*\.jar$")" || true
if [ "$num_jars" -eq 0 ] && [ -z "$SPARK_ASSEMBLY_JAR" ]; then
  echo "Failed to find Spark assembly in $ASSEMBLY_DIR." 1>&2
  echo "You need to build Spark before running this program." 1>&2
  exit 1
fi
ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" || true)"
if [ "$num_jars" -gt 1 ]; then
  echo "Found multiple Spark assembly jars in $ASSEMBLY_DIR:" 1>&2
  echo "$ASSEMBLY_JARS" 1>&2
  echo "Please remove all but one jar." 1>&2
  exit 1
fi

SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"
# Verify that versions of java used to build the jars and run Spark are compatible
if [ -n "$JAVA_HOME" ]; then
  JAR_CMD="$JAVA_HOME/bin/jar"
else
  JAR_CMD="jar"
fi

# Only run the check when a jar tool is actually available.
if command -v "$JAR_CMD" >/dev/null 2>&1; then
  # Listing a bogus entry forces jar to open (and validate) the archive.
  # `|| true`: a corrupted archive makes jar exit non-zero, which under
  # `set -e` would kill the script before we can print the helpful message.
  jar_error_check=$("$JAR_CMD" -tf "$SPARK_ASSEMBLY_JAR" nonexistent/class/path 2>&1) || true
  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
    echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
    echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
    echo "or build Spark with Java 6." 1>&2
    exit 1
  fi
fi
# Classpath used to invoke the launcher library: the assembly jar itself.
LAUNCH_CLASSPATH="$SPARK_ASSEMBLY_JAR"

# Prepend the launcher build dir to the classpath when requested
# (developer workflow: pick up freshly compiled launcher classes).
case "$SPARK_PREPEND_CLASSES" in
  "") ;;
  *)
    LAUNCH_CLASSPATH="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
    ;;
esac

# Publish the assembly jar path in the environment for child processes
# (NOTE(review): the consumer is not visible in this file — presumably the
# launcher library; confirm before changing).
export _SPARK_ASSEMBLY="$SPARK_ASSEMBLY_JAR"
# The launcher library will print arguments separated by a NULL character, to allow arguments with
# characters that would be otherwise interpreted by the shell. Read that in a while loop, populating
# an array that will be used to exec the final command.
CMD=()
# IFS= keeps leading/trailing whitespace in each argument, -d '' splits on the
# NUL delimiter, -r preserves backslashes literally.
while IFS= read -d '' -r ARG; do
  CMD+=("$ARG")
done < <("$RUNNER" -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@")

# A first token of "usage" is run in the current shell rather than exec'd;
# presumably the launcher emits this to print help text — confirm against
# org.apache.spark.launcher.Main. Every other command replaces this shell.
# NOTE(review): the launcher JVM's exit status is not checked here (process
# substitution failures are invisible to `set -e`), so CMD may be empty if
# the launcher crashes — verify this is acceptable.
if [ "${CMD[0]}" = "usage" ]; then
  "${CMD[@]}"
else
  exec "${CMD[@]}"
fi