92502703f4
Slight correction: I'm leaving sparkR as-is (i.e., R files are still not supported) and fixed only run-tests.sh as shivaram described. I also assume we are going to cover all doc changes in https://issues.apache.org/jira/browse/SPARK-12846 instead of here. rxin shivaram zjffdu Author: felixcheung <felixcheung_m@hotmail.com> Closes #10792 from felixcheung/sparkRcmd.
40 lines
1.4 KiB
Bash
Executable file
40 lines
1.4 KiB
Bash
Executable file
#!/bin/bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Absolute path of the directory containing this script; everything below
# (spark-submit, the R package, the log file) is located relative to it.
# Quoted $(…) form instead of backticks; && so a failed cd can't leave
# FWDIR pointing at the wrong directory.
FWDIR="$(cd "$(dirname "$0")" && pwd)"

# Aggregate failure flag; flipped to non-zero if the test run fails.
FAILED=0

# All test output is accumulated here; start from a clean file each run.
LOGFILE="$FWDIR/unit-tests.out"
rm -f "$LOGFILE"
# Run the SparkR unit tests through spark-submit, teeing all output (stdout
# and stderr) into the log file so it can be replayed on failure.
# SPARK_TESTING=1 puts SparkR into test mode; fs.default.name is pinned to
# the local filesystem so the tests do not require an HDFS deployment.
# Paths are quoted so a Spark checkout under a directory containing spaces
# still works.
SPARK_TESTING=1 "$FWDIR/../bin/spark-submit" \
  --driver-java-options "-Dlog4j.configuration=file:$FWDIR/log4j.properties" \
  --conf spark.hadoop.fs.default.name="file:///" \
  "$FWDIR/pkg/tests/run-all.R" 2>&1 | tee -a "$LOGFILE"
# tee masks spark-submit's exit status, so recover it from PIPESTATUS[0];
# the arithmetic || collapses it with any earlier failure into 0/1.
FAILED=$((PIPESTATUS[0]||$FAILED))
# Report the overall result in color and exit non-zero on failure so CI
# jobs calling this script fail correctly.
if [[ $FAILED != 0 ]]; then
    # Replay the captured log so the failure details are visible inline.
    cat "$LOGFILE"
    echo -en "\033[31m"  # Red
    echo "Had test failures; see logs."
    echo -en "\033[0m"   # No color
    # 'exit -1' is not a valid POSIX exit operand (bash coerces it to 255);
    # use the conventional failure status instead.
    exit 1
else
    echo -en "\033[32m"  # Green
    echo "Tests passed."
    echo -en "\033[0m"   # No color
fi