# Commit 457e58befe — SPARK-1424: update build docs & shell scripts so that
# developers are aware of the change from the "assembly" build target to
# "package".  Tested manually: ran ./bin/spark-shell after ./build/sbt assembly
# and verified the error message printed, then ran the newly suggested build
# target and verified ./bin/spark-shell runs afterwards.
# Authors: Holden Karau <holden@pigscanfly.ca>, Holden Karau <holden@us.ibm.com>
# Closes #12197 from holdenk/SPARK-1424-spark-class-broken-fix-build-docs.
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Determine where Spark is installed: honor a caller-exported SPARK_HOME,
# otherwise default to the parent of this script's directory.
# (Use $(...) instead of deprecated backticks for the command substitution.)
if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"
fi

# Load per-installation environment defaults (e.g. SPARK_SCALA_VERSION).
. "${SPARK_HOME}"/bin/load-spark-env.sh
# Find the java binary used to launch Spark: prefer ${JAVA_HOME}/bin/java when
# JAVA_HOME is set, otherwise fall back to whatever "java" resolves to on the
# PATH.  The PATH probe uses command -v's exit status directly rather than
# testing its (unquoted) output inside [ ], which word-splits and relies on
# deprecated backticks.
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
elif command -v java > /dev/null; then
  RUNNER="java"
else
  echo "JAVA_HOME is not set" >&2
  exit 1
fi
# Locate the directory holding the Spark jars.  A packaged distribution
# (identified by the RELEASE marker file) ships them in jars/; a source
# checkout leaves them under the assembly module's scala build output.
if [ -f "${SPARK_HOME}/RELEASE" ]; then
  SPARK_JARS_DIR="${SPARK_HOME}/jars"
else
  SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
fi

# Build the launch classpath from that directory; outside of test runs, refuse
# to continue when the jars have not been built yet.
if [ -d "$SPARK_JARS_DIR" ] || [ -n "$SPARK_TESTING$SPARK_SQL_TESTING" ]; then
  LAUNCH_CLASSPATH="$SPARK_JARS_DIR/*"
else
  echo "Failed to find Spark jars directory ($SPARK_JARS_DIR)." 1>&2
  echo "You need to build Spark with the target \"package\" before running this program." 1>&2
  exit 1
fi
# Developers can ask (via SPARK_PREPEND_CLASSES) to put freshly compiled
# launcher classes ahead of the packaged jars on the launch classpath.
if [ -n "$SPARK_PREPEND_CLASSES" ]; then
  LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
fi

# Test runs must not pick up a real Hadoop/YARN configuration from the
# environment, so drop both config-dir variables.
if [[ -n "$SPARK_TESTING" ]]; then
  unset YARN_CONF_DIR HADOOP_CONF_DIR
fi
# The launcher library emits the final command as NUL-separated arguments so
# that values containing whitespace or shell metacharacters survive intact.
# Read them into an array via process substitution (a pipe would run the loop
# in a subshell and lose CMD), then replace this shell with that command.
CMD=()
while IFS= read -r -d '' _arg; do
  CMD+=("$_arg")
done < <("$RUNNER" -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@")
exec "${CMD[@]}"