#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
function exit_with_usage {
  cat << EOF
usage: release-build.sh <package|docs|publish-snapshot|publish-release>
Creates build deliverables from a Spark commit.

Top level targets are
  package: Create binary packages and copy them to home.apache
  docs: Build docs and copy them to home.apache
  publish-snapshot: Publish snapshot release to Apache snapshots
  publish-release: Publish a release to Apache release repo

All other inputs are environment variables

GIT_REF - Release tag or commit to build from
SPARK_VERSION - Release identifier used when publishing
SPARK_PACKAGE_VERSION - Release identifier in top level package directory
REMOTE_PARENT_DIR - Parent in which to create doc or release builds.
REMOTE_PARENT_MAX_LENGTH - If set, parent directory will be cleaned to only
 have this number of subdirectories (by deleting old ones). WARNING: This deletes data.

ASF_USERNAME - Username of ASF committer account
ASF_PASSWORD - Password of ASF committer account
ASF_RSA_KEY - RSA private key file for ASF committer account

GPG_KEY - GPG key used to sign release artifacts
GPG_PASSPHRASE - Passphrase for GPG key
EOF
  exit 1
}
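
# Example invocation (all values below are hypothetical placeholders):
#   GIT_REF=v2.2.0 ASF_USERNAME=someuser ASF_PASSWORD=... \
#   ASF_RSA_KEY=$HOME/.ssh/id_rsa GPG_KEY=ABCD1234 GPG_PASSPHRASE=... \
#   ./release-build.sh package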

set -e

if [ $# -eq 0 ]; then
  exit_with_usage
fi

if [[ $@ == *"help"* ]]; then
  exit_with_usage
fi

for env in ASF_USERNAME ASF_RSA_KEY GPG_PASSPHRASE GPG_KEY; do
  if [ -z "${!env}" ]; then
    echo "ERROR: $env must be set to run this script"
    exit_with_usage
  fi
done
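# Note: "${!env}" above is bash indirect expansion; it dereferences the
# variable whose name is held in $env.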

# Explicitly set locale in order to make `sort` output consistent across machines.
# See https://stackoverflow.com/questions/28881 for more details.
export LC_ALL=C

# Commit ref to checkout when building
GIT_REF=${GIT_REF:-master}

# Destination directory parent on remote server
REMOTE_PARENT_DIR=${REMOTE_PARENT_DIR:-/home/$ASF_USERNAME/public_html}

GPG="gpg --no-tty --batch"
NEXUS_ROOT=https://repository.apache.org/service/local/staging
NEXUS_PROFILE=d63f592e7eac0 # Profile for Spark staging uploads
BASE_DIR=$(pwd)

MVN="build/mvn --force"

# Hive-specific profiles for some builds
HIVE_PROFILES="-Phive -Phive-thriftserver"
# Profiles for publishing snapshots and release to Maven Central
PUBLISH_PROFILES="-Pmesos -Pyarn $HIVE_PROFILES -Pspark-ganglia-lgpl -Pkinesis-asl"
# Profiles for building binary releases
BASE_RELEASE_PROFILES="-Pmesos -Pyarn -Psparkr"
# Scala 2.11 only profiles for some builds
SCALA_2_11_PROFILES="-Pkafka-0-8"
# Scala 2.12 only profiles for some builds
SCALA_2_12_PROFILES="-Pscala-2.12"
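
# For example (hypothetical invocation), a snapshot deploy below expands to roughly:
#   build/mvn --force -Pkafka-0-8 -Pmesos -Pyarn -Phive -Phive-thriftserver \
#     -Pspark-ganglia-lgpl -Pkinesis-asl -DskipTests deploy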

rm -rf spark
git clone https://git-wip-us.apache.org/repos/asf/spark.git
cd spark
git checkout $GIT_REF
git_hash=`git rev-parse --short HEAD`
echo "Checked out Spark git hash $git_hash"

if [ -z "$SPARK_VERSION" ]; then
  SPARK_VERSION=$($MVN help:evaluate -Dexpression=project.version \
    | grep -v INFO | grep -v WARNING | grep -v Download)
fi

if [ -z "$SPARK_PACKAGE_VERSION" ]; then
  SPARK_PACKAGE_VERSION="${SPARK_VERSION}-$(date +%Y_%m_%d_%H_%M)-${git_hash}"
fi

DEST_DIR_NAME="spark-$SPARK_PACKAGE_VERSION"
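# e.g. spark-2.3.0-SNAPSHOT-2017_09_13_12_00-abc1234 (hypothetical values)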

function LFTP {
  SSH="ssh -o ConnectTimeout=300 -o StrictHostKeyChecking=no -i $ASF_RSA_KEY"
  COMMANDS=$(cat <<EOF
set net:max-retries 1 &&
set sftp:connect-program $SSH &&
connect -u $ASF_USERNAME,p sftp://home.apache.org &&
$@
EOF
)
  lftp --norc -c "$COMMANDS"
}
export -f LFTP
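# LFTP runs one lftp batch against home.apache.org; for example (hypothetical):
#   LFTP "mkdir -p $REMOTE_PARENT_DIR/tmp && exit 0"
# `export -f` makes the function available to any child bash processes.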

git clean -d -f -x
rm .gitignore
rm -rf .git
cd ..

if [ -n "$REMOTE_PARENT_MAX_LENGTH" ]; then
  old_dirs=$(
    LFTP nlist $REMOTE_PARENT_DIR \
      | grep -v "^\." \
      | sort -r \
      | tail -n +$REMOTE_PARENT_MAX_LENGTH)
  for old_dir in $old_dirs; do
    echo "Removing directory: $old_dir"
    LFTP "rm -rf $REMOTE_PARENT_DIR/$old_dir && exit 0"
  done
fi

if [[ "$1" == "package" ]]; then
  # Source and binary tarballs
  echo "Packaging release tarballs"
  cp -r spark spark-$SPARK_VERSION
  tar cvzf spark-$SPARK_VERSION.tgz spark-$SPARK_VERSION
  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour --output spark-$SPARK_VERSION.tgz.asc \
    --detach-sig spark-$SPARK_VERSION.tgz
  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md MD5 spark-$SPARK_VERSION.tgz > \
    spark-$SPARK_VERSION.tgz.md5
  echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
    SHA512 spark-$SPARK_VERSION.tgz > spark-$SPARK_VERSION.tgz.sha
  rm -rf spark-$SPARK_VERSION
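
  # The detached signature can be checked later with, e.g.:
  #   gpg --verify spark-$SPARK_VERSION.tgz.asc spark-$SPARK_VERSION.tgz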

  # Updated for each binary build
  make_binary_release() {
    NAME=$1
    FLAGS=$2
    ZINC_PORT=$3
    BUILD_PACKAGE=$4
    cp -r spark spark-$SPARK_VERSION-bin-$NAME

    cd spark-$SPARK_VERSION-bin-$NAME

    # TODO There should probably be a flag to make-distribution to allow 2.12 support
    #if [[ $FLAGS == *scala-2.12* ]]; then
    #  ./dev/change-scala-version.sh 2.12
    #fi

    export ZINC_PORT=$ZINC_PORT
    echo "Creating distribution: $NAME ($FLAGS)"

    # Write out the version to PySpark's version info, rewriting the '-' into
    # a '.' and SNAPSHOT into dev0 to be closer to PEP440.
    PYSPARK_VERSION=`echo "$SPARK_VERSION" | sed -r "s/-/./" | sed -r "s/SNAPSHOT/dev0/"`
    echo "__version__='$PYSPARK_VERSION'" > python/pyspark/version.py

    # Get maven home set by MVN
    MVN_HOME=`$MVN -version 2>&1 | grep 'Maven home' | awk '{print $NF}'`

    if [ -z "$BUILD_PACKAGE" ]; then
      echo "Creating distribution without PIP/R package"
      ./dev/make-distribution.sh --name $NAME --mvn $MVN_HOME/bin/mvn --tgz $FLAGS \
        -DzincPort=$ZINC_PORT > ../binary-release-$NAME.log 2>&1
      cd ..
    elif [[ "$BUILD_PACKAGE" == "withr" ]]; then
      echo "Creating distribution with R package"
      ./dev/make-distribution.sh --name $NAME --mvn $MVN_HOME/bin/mvn --tgz --r $FLAGS \
        -DzincPort=$ZINC_PORT > ../binary-release-$NAME.log 2>&1
      cd ..

      echo "Copying and signing R source package"
      R_DIST_NAME=SparkR_$SPARK_VERSION.tar.gz
      cp spark-$SPARK_VERSION-bin-$NAME/R/$R_DIST_NAME .

      echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour \
        --output $R_DIST_NAME.asc \
        --detach-sig $R_DIST_NAME
      echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
        MD5 $R_DIST_NAME > \
        $R_DIST_NAME.md5
      echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
        SHA512 $R_DIST_NAME > \
        $R_DIST_NAME.sha
    else
      echo "Creating distribution with PIP package"
      ./dev/make-distribution.sh --name $NAME --mvn $MVN_HOME/bin/mvn --tgz --pip $FLAGS \
        -DzincPort=$ZINC_PORT > ../binary-release-$NAME.log 2>&1
      cd ..

      echo "Copying and signing python distribution"
      PYTHON_DIST_NAME=pyspark-$PYSPARK_VERSION.tar.gz
      cp spark-$SPARK_VERSION-bin-$NAME/python/dist/$PYTHON_DIST_NAME .

      echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour \
        --output $PYTHON_DIST_NAME.asc \
        --detach-sig $PYTHON_DIST_NAME
      echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
        MD5 $PYTHON_DIST_NAME > \
        $PYTHON_DIST_NAME.md5
      echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
        SHA512 $PYTHON_DIST_NAME > \
        $PYTHON_DIST_NAME.sha
    fi

    echo "Copying and signing regular binary distribution"
    cp spark-$SPARK_VERSION-bin-$NAME/spark-$SPARK_VERSION-bin-$NAME.tgz .
    echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour \
      --output spark-$SPARK_VERSION-bin-$NAME.tgz.asc \
      --detach-sig spark-$SPARK_VERSION-bin-$NAME.tgz
    echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
      MD5 spark-$SPARK_VERSION-bin-$NAME.tgz > \
      spark-$SPARK_VERSION-bin-$NAME.tgz.md5
    echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
      SHA512 spark-$SPARK_VERSION-bin-$NAME.tgz > \
      spark-$SPARK_VERSION-bin-$NAME.tgz.sha
  }

  # TODO: Check exit codes of children here:
  # http://stackoverflow.com/questions/1570262/shell-get-exit-code-of-background-process
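  # A minimal sketch of that check (not enabled here): append each child PID
  # with `pids+=($!)` after the background calls below, then
  #   for pid in "${pids[@]}"; do wait "$pid" || exit 1; done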

  # We increment the Zinc port each time to avoid OOM's and other craziness if multiple builds
  # share the same Zinc server.
  make_binary_release "hadoop2.6" "-Phadoop-2.6 $HIVE_PROFILES $SCALA_2_11_PROFILES $BASE_RELEASE_PROFILES" "3035" "withr" &
  make_binary_release "hadoop2.7" "-Phadoop-2.7 $HIVE_PROFILES $SCALA_2_11_PROFILES $BASE_RELEASE_PROFILES" "3036" "withpip" &
  make_binary_release "without-hadoop" "-Phadoop-provided $SCALA_2_11_PROFILES $BASE_RELEASE_PROFILES" "3038" &
  wait
  rm -rf spark-$SPARK_VERSION-bin-*/

  # Copy data
  dest_dir="$REMOTE_PARENT_DIR/${DEST_DIR_NAME}-bin"
  echo "Copying release tarballs to $dest_dir"
  # Put to new directory:
  LFTP mkdir -p $dest_dir || true
  LFTP mput -O $dest_dir 'spark-*'
  LFTP mput -O $dest_dir 'pyspark-*'
  LFTP mput -O $dest_dir 'SparkR_*'
  # Delete /latest directory and rename new upload to /latest
  LFTP "rm -r -f $REMOTE_PARENT_DIR/latest || exit 0"
  LFTP mv $dest_dir "$REMOTE_PARENT_DIR/latest"
  # Re-upload a second time and leave the files in the timestamped upload directory:
  LFTP mkdir -p $dest_dir || true
  LFTP mput -O $dest_dir 'spark-*'
  LFTP mput -O $dest_dir 'pyspark-*'
  LFTP mput -O $dest_dir 'SparkR_*'
  exit 0
fi

if [[ "$1" == "docs" ]]; then
  # Documentation
  cd spark
  echo "Building Spark docs"
  dest_dir="$REMOTE_PARENT_DIR/${DEST_DIR_NAME}-docs"
  cd docs
  # TODO: Make configurable to add this: PRODUCTION=1
  PRODUCTION=1 RELEASE_VERSION="$SPARK_VERSION" jekyll build
  echo "Copying release documentation to $dest_dir"
  # Put to new directory:
  LFTP mkdir -p $dest_dir || true
  LFTP mirror -R _site $dest_dir
  # Delete /latest directory and rename new upload to /latest
  LFTP "rm -r -f $REMOTE_PARENT_DIR/latest || exit 0"
  LFTP mv $dest_dir "$REMOTE_PARENT_DIR/latest"
  # Re-upload a second time and leave the files in the timestamped upload directory:
  LFTP mkdir -p $dest_dir || true
  LFTP mirror -R _site $dest_dir
  cd ..
  exit 0
fi

if [[ "$1" == "publish-snapshot" ]]; then
  cd spark
  # Publish Spark to the Apache snapshots repo
  echo "Deploying Spark SNAPSHOT at '$GIT_REF' ($git_hash)"
  echo "Publish version is $SPARK_VERSION"
  if [[ ! $SPARK_VERSION == *"SNAPSHOT"* ]]; then
    echo "ERROR: Snapshots must have a version containing SNAPSHOT"
    echo "ERROR: You gave version '$SPARK_VERSION'"
    exit 1
  fi
  # Coerce the requested version
  $MVN versions:set -DnewVersion=$SPARK_VERSION
  tmp_settings="tmp-settings.xml"
  echo "<settings><servers><server>" > $tmp_settings
  echo "<id>apache.snapshots.https</id><username>$ASF_USERNAME</username>" >> $tmp_settings
  echo "<password>$ASF_PASSWORD</password>" >> $tmp_settings
  echo "</server></servers></settings>" >> $tmp_settings

  # Generate random point for Zinc
  export ZINC_PORT=$(python -S -c "import random; print random.randrange(3030,4030)")
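  # Note: the `print` statement above assumes Python 2 on the build machine.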

  $MVN -DzincPort=$ZINC_PORT --settings $tmp_settings -DskipTests $SCALA_2_11_PROFILES $PUBLISH_PROFILES deploy
  #./dev/change-scala-version.sh 2.12
  #$MVN -DzincPort=$ZINC_PORT --settings $tmp_settings \
  #  -DskipTests $SCALA_2_12_PROFILES $PUBLISH_PROFILES clean deploy

  # Clean-up Zinc nailgun process
  /usr/sbin/lsof -P | grep $ZINC_PORT | grep LISTEN | awk '{ print $2; }' | xargs kill

  rm $tmp_settings
  cd ..
  exit 0
fi

if [[ "$1" == "publish-release" ]]; then
  cd spark
  # Publish Spark to Maven release repo
  echo "Publishing Spark checkout at '$GIT_REF' ($git_hash)"
  echo "Publish version is $SPARK_VERSION"
  # Coerce the requested version
  $MVN versions:set -DnewVersion=$SPARK_VERSION

  # Using Nexus API documented here:
  # https://support.sonatype.com/entries/39720203-Uploading-to-a-Staging-Repository-via-REST-API
  echo "Creating Nexus staging repository"
  repo_request="<promoteRequest><data><description>Apache Spark $SPARK_VERSION (commit $git_hash)</description></data></promoteRequest>"
  out=$(curl -X POST -d "$repo_request" -u $ASF_USERNAME:$ASF_PASSWORD \
    -H "Content-Type:application/xml" -v \
    $NEXUS_ROOT/profiles/$NEXUS_PROFILE/start)
  staged_repo_id=$(echo $out | sed -e "s/.*\(orgapachespark-[0-9]\{4\}\).*/\1/")
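  # e.g. staged_repo_id=orgapachespark-1234 (hypothetical repository id)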
  echo "Created Nexus staging repository: $staged_repo_id"

  tmp_repo=$(mktemp -d spark-repo-XXXXX)

  # Generate random point for Zinc
  export ZINC_PORT=$(python -S -c "import random; print random.randrange(3030,4030)")

  $MVN -DzincPort=$ZINC_PORT -Dmaven.repo.local=$tmp_repo -DskipTests $SCALA_2_11_PROFILES $PUBLISH_PROFILES clean install
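  # The install above writes the built artifacts (plus downloaded dependencies)
  # into $tmp_repo; only the org/apache/spark subtree is staged below.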

  #./dev/change-scala-version.sh 2.12
  #$MVN -DzincPort=$ZINC_PORT -Dmaven.repo.local=$tmp_repo \
  #  -DskipTests $SCALA_2_12_PROFILES $PUBLISH_PROFILES clean install

  # Clean-up Zinc nailgun process
  /usr/sbin/lsof -P | grep $ZINC_PORT | grep LISTEN | awk '{ print $2; }' | xargs kill

  #./dev/change-scala-version.sh 2.11

  pushd $tmp_repo/org/apache/spark

  # Remove any extra files generated during install
  find . -type f | grep -v \.jar | grep -v \.pom | xargs rm

  echo "Creating hash and signature files"
  for file in $(find . -type f)
  do
    echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --output $file.asc \
      --detach-sig --armour $file;
    if [ $(command -v md5) ]; then
      # Available on OS X; -q to keep only hash
      md5 -q $file > $file.md5
    else
      # Available on Linux; cut to keep only hash
      md5sum $file | cut -f1 -d' ' > $file.md5
    fi
    sha1sum $file | cut -f1 -d' ' > $file.sha1
  done

  nexus_upload=$NEXUS_ROOT/deployByRepositoryId/$staged_repo_id
  echo "Uploading files to $nexus_upload"
  for file in $(find . -type f)
  do
    # strip leading ./
    file_short=$(echo $file | sed -e "s/\.\///")
    dest_url="$nexus_upload/org/apache/spark/$file_short"
    echo "  Uploading $file_short"
    curl -u $ASF_USERNAME:$ASF_PASSWORD --upload-file $file_short $dest_url
  done

  echo "Closing Nexus staging repository"
  repo_request="<promoteRequest><data><stagedRepositoryId>$staged_repo_id</stagedRepositoryId><description>Apache Spark $SPARK_VERSION (commit $git_hash)</description></data></promoteRequest>"
  out=$(curl -X POST -d "$repo_request" -u $ASF_USERNAME:$ASF_PASSWORD \
    -H "Content-Type:application/xml" -v \
    $NEXUS_ROOT/profiles/$NEXUS_PROFILE/finish)
  echo "Closed Nexus staging repository: $staged_repo_id"
  popd
  rm -rf $tmp_repo
  cd ..
  exit 0
fi

cd ..
rm -rf spark
echo "ERROR: expects to be called with 'package', 'docs', 'publish-snapshot' or 'publish-release'"