28132ceb10
Temporarily remove GREP_OPTIONS if set in bin/spark-class. Some GREP_OPTIONS will modify the output of the grep commands that are looking for the assembly jars. For example, if the -n option is specified, the grep output will look like: 5:spark-assembly-1.5.1-hadoop2.4.0.jar This will not match the regular expressions, and so the jar files will not be found. We could improve the regular expression to handle this case and trim off extra characters, but it is difficult to know which options may or may not be set. Unsetting GREP_OPTIONS within the script handles all the cases and gives the desired output. Author: Jeffrey Naisbitt <jnaisbitt@familysearch.org> Closes #9231 from naisbitt/unset-GREP_OPTIONS.
81 lines
2.7 KiB
Bash
Executable file
81 lines
2.7 KiB
Bash
Executable file
#!/usr/bin/env bash
|
|
|
|
#
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
# this work for additional information regarding copyright ownership.
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
# (the "License"); you may not use this file except in compliance with
|
|
# the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
#
|
|
|
|
# Figure out where Spark is installed: the parent of the directory holding
# this script (bin/), resolved to an absolute path.
export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"

. "$SPARK_HOME"/bin/load-spark-env.sh

# Find the java binary. Prefer $JAVA_HOME when set; otherwise fall back to
# whatever "java" resolves to on PATH. Fail loudly if neither is available.
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
else
  # Use `command -v` as a condition directly instead of `[ \`command -v java\` ]`:
  # the latter breaks if the resolved path contains whitespace.
  if command -v java > /dev/null; then
    RUNNER="java"
  else
    echo "JAVA_HOME is not set" >&2
    exit 1
  fi
fi
|
|
|
# Find the Spark assembly jar. A binary distribution (marked by the RELEASE
# file) keeps it under lib/; a source checkout keeps it under the sbt/maven
# build output directory for the current Scala version.
SPARK_ASSEMBLY_JAR=
if [ -f "$SPARK_HOME/RELEASE" ]; then
  ASSEMBLY_DIR="$SPARK_HOME/lib"
else
  ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
fi

# Clear GREP_OPTIONS for the rest of this script: user-set options such as -n
# would prefix grep's output (e.g. "5:spark-assembly-...jar") and make the
# jar-name patterns below fail to match.
GREP_OPTIONS=
num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep -c "^spark-assembly.*hadoop.*\.jar$")"
# No jar is fatal unless one was supplied explicitly or we are running from
# compiled classes (SPARK_PREPEND_CLASSES=1).
if [ "$num_jars" -eq "0" ] && [ -z "$SPARK_ASSEMBLY_JAR" ] && [ "$SPARK_PREPEND_CLASSES" != "1" ]; then
  echo "Failed to find Spark assembly in $ASSEMBLY_DIR." 1>&2
  echo "You need to build Spark before running this program." 1>&2
  exit 1
fi
if [ -d "$ASSEMBLY_DIR" ]; then
  # `|| true` keeps the pipeline's status clean when grep matches nothing.
  ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" || true)"
  # More than one assembly jar is ambiguous; refuse to guess.
  if [ "$num_jars" -gt "1" ]; then
    echo "Found multiple Spark assembly jars in $ASSEMBLY_DIR:" 1>&2
    echo "$ASSEMBLY_JARS" 1>&2
    echo "Please remove all but one jar." 1>&2
    exit 1
  fi
fi

SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"

LAUNCH_CLASSPATH="$SPARK_ASSEMBLY_JAR"

# Add the launcher build dir to the classpath if requested.
if [ -n "$SPARK_PREPEND_CLASSES" ]; then
  LAUNCH_CLASSPATH="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
fi

# Exported so the launcher (and anything it spawns) sees the same jar.
export _SPARK_ASSEMBLY="$SPARK_ASSEMBLY_JAR"
|
|
# The launcher library emits the final command's arguments separated by NUL
# characters, so tokens containing whitespace or shell metacharacters survive
# intact. Gather them into an array, then replace this shell with the command.
CMD=()
while IFS= read -r -d '' token; do
  CMD+=("$token")
done < <("$RUNNER" -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@")
exec "${CMD[@]}"