1739e75fec
Some very rare JVM errors are printed to stdout, and that confuses the code in spark-class. So add a check so that those cases are detected and the proper error message is shown to the user. Tested by running spark-submit after setting "ulimit -v 32000". Closes #14231 Author: Marcelo Vanzin <vanzin@cloudera.com> Closes #14508 from vanzin/SPARK-16586.
98 lines
3.1 KiB
Bash
Executable file
#!/usr/bin/env bash
|
|
|
|
#
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
# this work for additional information regarding copyright ownership.
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
# (the "License"); you may not use this file except in compliance with
|
|
# the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
#
|
|
|
|
# Resolve SPARK_HOME to the parent of this script's directory when the
# caller has not already exported it. Uses $(...) instead of the legacy
# backtick form, which required fragile nested quoting.
if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"
fi

# Load spark-env.sh (and SPARK_SCALA_VERSION detection) for the rest of
# this script.
. "${SPARK_HOME}"/bin/load-spark-env.sh
# Find the java binary: prefer $JAVA_HOME/bin/java when JAVA_HOME is set,
# otherwise fall back to whatever "java" resolves to on the PATH.
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
else
  # Quoted $(...) instead of unquoted backticks: an empty result still
  # yields a well-formed [ ] test, and a path containing spaces no longer
  # word-splits.
  if [ "$(command -v java)" ]; then
    RUNNER="java"
  else
    echo "JAVA_HOME is not set" >&2
    exit 1
  fi
fi
# Locate the directory holding the Spark jars. A release distribution keeps
# them under $SPARK_HOME/jars; a development checkout keeps them under the
# assembly module's build output for the current Scala version.
if [ -f "${SPARK_HOME}/RELEASE" ]; then
  SPARK_JARS_DIR="${SPARK_HOME}/jars"
else
  SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
fi

# Accept the directory if it exists, or unconditionally when running under
# the test harness; otherwise tell the user to build Spark first.
if [ -d "$SPARK_JARS_DIR" ] || [ -n "$SPARK_TESTING$SPARK_SQL_TESTING" ]; then
  LAUNCH_CLASSPATH="$SPARK_JARS_DIR/*"
else
  echo "Failed to find Spark jars directory ($SPARK_JARS_DIR)." 1>&2
  echo "You need to build Spark with the target \"package\" before running this program." 1>&2
  exit 1
fi
# Add the launcher build dir to the classpath if requested.
if [ -n "$SPARK_PREPEND_CLASSES" ]; then
  LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
fi

# When running under the test harness, ignore any Hadoop/YARN configuration
# directories that happen to be set in the environment.
if [ -n "$SPARK_TESTING" ]; then
  unset YARN_CONF_DIR HADOOP_CONF_DIR
fi
# The launcher library emits the child command's arguments separated by NUL
# characters, so that arguments containing characters the shell would
# otherwise interpret survive intact. The launcher's own exit status is
# appended as a final NUL-terminated token, letting the parent shell both
# rebuild the command array and check whether the launcher succeeded.
build_command() {
  "$RUNNER" -Xmx128m -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@"
  local launcher_status=$?
  printf "%d\0" "$launcher_status"
}
# Build the final command array from the launcher's NUL-delimited output.
# The last element is the launcher's exit status, which is stripped off and
# checked before the child command is executed.
CMD=()
while IFS= read -d '' -r ARG; do
  CMD+=("$ARG")
done < <(build_command "$@")

COUNT=${#CMD[@]}
LAST=$((COUNT - 1))
LAUNCHER_EXIT_CODE=${CMD[$LAST]}

# Certain JVM failures result in errors being printed to stdout (instead of
# stderr), which causes the code that parses the output of the launcher to
# get confused. In those cases, check if the exit code is an integer, and if
# it's not, handle it as a special error case.
if ! [[ $LAUNCHER_EXIT_CODE =~ ^[0-9]+$ ]]; then
  # Show everything the launcher printed except the trailing (bogus) status
  # token. NOTE(review): "head -n-1" (negative count) is a GNU coreutils
  # extension — confirm whether BSD/macOS portability matters here.
  echo "${CMD[@]}" | head -n-1 1>&2
  exit 1
fi

# Quoted so an (unexpectedly) empty value cannot produce a
# "[: unary operator expected" error instead of a clean exit.
if [ "$LAUNCHER_EXIT_CODE" != 0 ]; then
  exit "$LAUNCHER_EXIT_CODE"
fi

# Drop the status token and replace this shell with the assembled command.
CMD=("${CMD[@]:0:$LAST}")
exec "${CMD[@]}"