#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Script to create a binary distribution for easy deploys of Spark.
# The distribution directory defaults to dist/ but can be overridden below.
# The distribution contains fat (assembly) jars that include the Scala library,
# so it is completely self contained.
# It does not contain source or *.class files.
#
# Fail fast: abort on any command error (-e) or on a failure anywhere in a
# pipeline (pipefail), and trace each command as it runs (-x) so release
# builds are easy to debug.
set -e
set -o pipefail
set -x
# Figure out where the Spark framework is installed.
# ($0 may be a relative path, so resolve the parent directory absolutely.)
SPARK_HOME="$(cd "$(dirname "$0")/.."; pwd)"
DISTDIR="$SPARK_HOME/dist"

# Packaging options; overridable via command-line flags parsed below.
MAKE_TGZ=false
MAKE_PIP=false
MAKE_R=false
NAME=none
MVN="$SPARK_HOME/build/mvn"
# Print the usage message and terminate with a non-zero status.
function exit_with_usage {
  cl_options="[--name] [--tgz] [--pip] [--r] [--mvn <mvn-command>]"
  printf '%s\n' \
    "make-distribution.sh - tool for making binary distributions of Spark" \
    "" \
    "usage:" \
    "make-distribution.sh $cl_options <maven build options>" \
    "See Spark's \"Building Spark\" doc for correct Maven options." \
    ""
  exit 1
}
# Parse arguments. Unrecognized arguments are left in place and forwarded to
# Maven later on; deprecated flags print an explanation and show usage.
while [ "$#" -gt 0 ]; do
  case "$1" in
    --hadoop)
      echo "Error: '--hadoop' is no longer supported:"
      echo "Error: use Maven profiles and options -Dhadoop.version and -Dyarn.version instead."
      echo "Error: Related profiles include hadoop-2.2, hadoop-2.3, hadoop-2.4, hadoop-2.6 and hadoop-2.7."
      exit_with_usage
      ;;
    --with-yarn)
      echo "Error: '--with-yarn' is no longer supported, use Maven option -Pyarn"
      exit_with_usage
      ;;
    --with-hive)
      echo "Error: '--with-hive' is no longer supported, use Maven options -Phive and -Phive-thriftserver"
      exit_with_usage
      ;;
    --tgz)
      MAKE_TGZ=true
      ;;
    --pip)
      MAKE_PIP=true
      ;;
    --r)
      MAKE_R=true
      ;;
    --mvn)
      MVN="$2"
      shift
      ;;
    --name)
      NAME="$2"
      shift
      ;;
    --help)
      exit_with_usage
      ;;
    *)
      break
      ;;
  esac
  shift
done
# If JAVA_HOME is not already set, fall back on the JDK location recorded in
# the system's rpm database, when rpm is available.
if [ -z "$JAVA_HOME" ]; then
  if [ "$(command -v rpm)" ]; then
    RPM_JAVA_HOME="$(rpm -E %java_home 2>/dev/null)"
    # rpm echoes the macro name back verbatim when it is undefined.
    if [ "$RPM_JAVA_HOME" != "%java_home" ]; then
      JAVA_HOME="$RPM_JAVA_HOME"
      echo "No JAVA_HOME set, proceeding with '$JAVA_HOME' learned from rpm"
    fi
  fi
fi

# A JDK is mandatory for the Maven build; bail out early if none was found.
if [ -z "$JAVA_HOME" ]; then
  echo "Error: JAVA_HOME is not set, cannot proceed." >&2
  exit 1
fi
# Record the git revision (when building from a checkout) so it can be
# embedded in the RELEASE file later; tolerate not being inside a git repo.
if command -v git >/dev/null 2>&1; then
  GITREV=$(git rev-parse --short HEAD 2>/dev/null || :)
  if [ -n "$GITREV" ]; then
    GITREVSTRING=" (git revision $GITREV)"
  fi
  unset GITREV
fi

# Verify the Maven launcher (either the bundled build/mvn or the one passed
# via --mvn) actually exists before attempting the build.
if [ ! "$(command -v "$MVN")" ] ; then
  echo "Could not locate Maven command: '$MVN'." >&2
  echo "Specify the Maven command with the --mvn flag" >&2
  exit 1
fi
# Ask Maven for the effective project/Scala/Hadoop versions so the package
# name matches what is actually built. The caller's extra Maven options are
# forwarded (quoted "$@" — unquoted $@ would word-split options containing
# spaces) so profile-dependent values resolve identically to the real build.
VERSION=$("$MVN" help:evaluate -Dexpression=project.version "$@" 2>/dev/null | grep -v "INFO" | tail -n 1)
SCALA_VERSION=$("$MVN" help:evaluate -Dexpression=scala.binary.version "$@" 2>/dev/null\
    | grep -v "INFO"\
    | tail -n 1)
SPARK_HADOOP_VERSION=$("$MVN" help:evaluate -Dexpression=hadoop.version "$@" 2>/dev/null\
    | grep -v "INFO"\
    | tail -n 1)
# SPARK_HIVE is 1 when the hive profile is active, 0 otherwise.
SPARK_HIVE=$("$MVN" help:evaluate -Dexpression=project.activeProfiles -pl sql/hive "$@" 2>/dev/null\
    | grep -v "INFO"\
    | fgrep --count "<id>hive</id>";\
    # Reset exit status to 0, otherwise the script stops here if the last grep finds nothing\
    # because we use "set -o pipefail"
    echo -n)
# Default the package name to the targeted Hadoop version when the caller
# did not supply one via --name.
if [ "$NAME" = "none" ]; then
  NAME="$SPARK_HADOOP_VERSION"
fi

echo "Spark version is $VERSION"

if [ "$MAKE_TGZ" = "true" ]; then
  echo "Making spark-$VERSION-bin-$NAME.tgz"
else
  echo "Making distribution for Spark $VERSION in $DISTDIR..."
fi
# Build uber fat JAR
cd "$SPARK_HOME"

# Give Maven generous memory unless the caller already configured MAVEN_OPTS.
export MAVEN_OPTS="${MAVEN_OPTS:--Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m}"

# Store the command as an array because $MVN variable might have spaces in it.
# Normal quoting tricks don't work.
# See: http://mywiki.wooledge.org/BashFAQ/050
# "$@" is quoted so Maven options containing spaces survive as single words.
BUILD_COMMAND=("$MVN" -T 1C clean package -DskipTests "$@")

# Actually build the jar
echo -e "\nBuilding with..."
echo -e "\$ ${BUILD_COMMAND[@]}\n"

"${BUILD_COMMAND[@]}"
# Make directories. ${DISTDIR:?} aborts instead of running "rm -rf /…" should
# DISTDIR ever be empty or unset.
rm -rf "${DISTDIR:?}"
mkdir -p "$DISTDIR/jars"
echo "Spark $VERSION$GITREVSTRING built for Hadoop $SPARK_HADOOP_VERSION" > "$DISTDIR/RELEASE"
echo "Build flags: $@" >> "$DISTDIR/RELEASE"

# Copy jars
cp "$SPARK_HOME"/assembly/target/scala*/jars/* "$DISTDIR/jars/"

# Only create the yarn directory if the yarn artifacts were built.
if [ -f "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar ]; then
  mkdir "$DISTDIR"/yarn
  cp "$SPARK_HOME"/common/network-yarn/target/scala*/spark-*-yarn-shuffle.jar "$DISTDIR/yarn"
fi
# Copy examples and dependencies
mkdir -p "$DISTDIR/examples/jars"
cp "$SPARK_HOME"/examples/target/scala*/jars/* "$DISTDIR/examples/jars"

# Deduplicate jars that have already been packaged as part of the main Spark dependencies.
for f in "$DISTDIR/examples/jars/"*; do
  # With no matches the glob stays literal; skip it rather than process a
  # nonexistent path.
  [ -e "$f" ] || continue
  name=$(basename "$f")
  if [ -f "$DISTDIR/jars/$name" ]; then
    rm "$DISTDIR/examples/jars/$name"
  fi
done
# Copy example sources (needed for python and SQL)
mkdir -p "$DISTDIR/examples/src/main"
cp -r "$SPARK_HOME"/examples/src/main "$DISTDIR/examples/src/"

# Copy license and ASF files
cp "$SPARK_HOME/LICENSE" "$DISTDIR"
cp -r "$SPARK_HOME/licenses" "$DISTDIR"
cp "$SPARK_HOME/NOTICE" "$DISTDIR"

# CHANGES.txt only exists in release branches; copy it when present.
if [ -e "$SPARK_HOME"/CHANGES.txt ]; then
  cp "$SPARK_HOME/CHANGES.txt" "$DISTDIR"
fi

# Copy data files
cp -r "$SPARK_HOME/data" "$DISTDIR"
# Make pip package
if [ "$MAKE_PIP" != "true" ]; then
  echo "Skipping building python distribution package"
else
  echo "Building python distribution package"
  pushd "$SPARK_HOME/python" > /dev/null
  python setup.py sdist
  popd > /dev/null
fi
# Make R package - this is used for both CRAN release and packing R layout into distribution
if [ "$MAKE_R" == "true" ]; then
  echo "Building R source package"
  # Version declared in the R package metadata, e.g. "Version: 2.1.0".
  R_PACKAGE_VERSION=$(grep Version "$SPARK_HOME/R/pkg/DESCRIPTION" | awk '{print $NF}')
  pushd "$SPARK_HOME/R" > /dev/null
  # Build source package and run full checks
  # Install source package to get it to generate vignettes, etc.
  # Do not source the check-cran.sh - it should be run from where it is for it to set SPARK_HOME
  NO_TESTS=1 CLEAN_INSTALL=1 "$SPARK_HOME/"R/check-cran.sh
  # Make a copy of R source package matching the Spark release version.
  cp "$SPARK_HOME/R/SparkR_$R_PACKAGE_VERSION.tar.gz" "$SPARK_HOME/R/SparkR_$VERSION.tar.gz"
  popd > /dev/null
else
  echo "Skipping building R source package"
fi
# Copy other things
mkdir "$DISTDIR"/conf
cp "$SPARK_HOME"/conf/*.template "$DISTDIR"/conf
cp "$SPARK_HOME/README.md" "$DISTDIR"
cp -r "$SPARK_HOME/bin" "$DISTDIR"
cp -r "$SPARK_HOME/python" "$DISTDIR"

# Remove the python distribution from dist/ if we built it. The directory
# prefix is quoted (word-splitting safe) while the trailing glob stays
# unquoted so it still expands.
if [ "$MAKE_PIP" == "true" ]; then
  rm -f "$DISTDIR"/python/dist/pyspark-*.tar.gz
fi

cp -r "$SPARK_HOME/sbin" "$DISTDIR"

# Copy SparkR if it exists
if [ -d "$SPARK_HOME"/R/lib/SparkR ]; then
  mkdir -p "$DISTDIR"/R/lib
  cp -r "$SPARK_HOME/R/lib/SparkR" "$DISTDIR"/R/lib
  cp "$SPARK_HOME/R/lib/sparkr.zip" "$DISTDIR"/R/lib
fi
# Optionally package the staged distribution into a tarball, staging it under
# a versioned directory name so the archive unpacks into that directory.
if [ "$MAKE_TGZ" == "true" ]; then
  TARDIR_NAME=spark-$VERSION-bin-$NAME
  TARDIR="$SPARK_HOME/$TARDIR_NAME"
  rm -rf "$TARDIR"
  cp -r "$DISTDIR" "$TARDIR"
  tar czf "$TARDIR_NAME.tgz" -C "$SPARK_HOME" "$TARDIR_NAME"
  rm -rf "$TARDIR"
fi