#!/usr/bin/env bash
|
|
|
|
|
|
|
|
#
|
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
|
|
# this work for additional information regarding copyright ownership.
|
|
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
|
|
# (the "License"); you may not use this file except in compliance with
|
|
|
|
# the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
#
|
|
|
|
|
2018-06-22 13:38:34 -04:00
|
|
|
# Resolve the directory containing this script (quoted so paths with
# spaces work) and pull in the shared release helper functions.
SELF=$(cd "$(dirname "$0")" && pwd)
. "$SELF/release-util.sh"
|
|
|
|
|
2015-08-12 00:16:48 -04:00
|
|
|
# Print usage describing the environment variables this script consumes,
# then terminate with a non-zero status.
function exit_with_usage {
  # Split declaration from command substitution so a basename failure
  # is not masked by `local`'s own exit status; quote "$0" for safety.
  local NAME
  NAME=$(basename "$0")
  cat << EOF
usage: $NAME
Tags a Spark release on a particular branch.

Inputs are specified with the following environment variables:
ASF_USERNAME - Apache Username
ASF_PASSWORD - Apache Password
GIT_NAME - Name to use with git
GIT_EMAIL - E-mail address to use with git
GIT_BRANCH - Git branch on which to make release
RELEASE_VERSION - Version used in pom files for release
RELEASE_TAG - Name of release tag
NEXT_VERSION - Development version after release
EOF
  exit 1
}
|
|
|
|
|
|
|
|
# Abort on any command failure, including failures inside pipelines.
set -e
set -o pipefail

# Show usage and exit if any argument mentions "help".
# "$*" joins all arguments into one string for the substring match
# (the original unquoted $@ relied on implicit joining inside [[ ]]).
if [[ "$*" == *"help"* ]]; then
  exit_with_usage
fi
|
|
|
|
|
2017-12-09 10:28:46 -05:00
|
|
|
# Interactively prompt for the ASF password when it was not supplied via
# the environment; echo is disabled while typing so it is not displayed.
if [[ -z "$ASF_PASSWORD" ]]; then
  echo 'The environment variable ASF_PASSWORD is not set. Enter the password.'
  echo
  # read -r keeps backslashes in the password literal instead of
  # treating them as escape characters.
  stty -echo && printf "ASF password: " && read -r ASF_PASSWORD && printf '\n' && stty echo
fi
|
|
|
|
|
2015-08-12 00:16:48 -04:00
|
|
|
# Fail fast unless every required input variable is set and non-empty.
# ${!env} is bash indirect expansion: the value of the variable whose
# name is stored in $env.
for env in ASF_USERNAME ASF_PASSWORD RELEASE_VERSION RELEASE_TAG NEXT_VERSION GIT_EMAIL GIT_NAME GIT_BRANCH; do
  if [[ -z "${!env}" ]]; then
    echo "$env must be set to run this script"
    exit 1
  fi
done
|
|
|
|
|
2018-06-22 13:38:34 -04:00
|
|
|
# Validate the local Java and Maven/SBT tooling; both helpers are
# defined in release-util.sh (sourced above).
init_java
init_maven_sbt

# Canonical ASF git repository for Spark; credentials are prepended
# to form the clone URL below.
ASF_SPARK_REPO="gitbox.apache.org/repos/asf/spark.git"
|
2015-08-12 00:16:48 -04:00
|
|
|
|
|
|
|
# Start from a clean checkout of the requested branch. NOTE(review):
# the password is embedded in the clone URL and may leak into process
# listings or shell traces — consider a git credential helper.
rm -rf spark
git clone "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_SPARK_REPO" -b "$GIT_BRANCH"
cd spark

# Identify the release manager for the commits created below.
git config user.name "$GIT_NAME"
git config user.email "$GIT_EMAIL"

# Create release version
# $MVN is intentionally unquoted: it may carry extra arguments
# (set by init_maven_sbt in release-util.sh).
$MVN versions:set -DnewVersion="$RELEASE_VERSION" | grep -v "no value" # silence logs
|
[SPARK-30265][INFRA] Do not change R version when releasing preview versions
### What changes were proposed in this pull request?
This PR makes the release scripts leave the R version unchanged when releasing preview versions.
### Why are the changes needed?
Failed to make Spark binary distribution:
```
++ . /opt/spark-rm/output/spark-3.0.0-preview2-bin-hadoop2.7/R/find-r.sh
+++ '[' -z /usr/bin ']'
++ /usr/bin/Rscript -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'
Loading required package: usethis
Updating SparkR documentation
First time using roxygen2. Upgrading automatically...
Loading SparkR
Invalid DESCRIPTION:
Malformed package version.
See section 'The DESCRIPTION file' in the 'Writing R Extensions'
manual.
Error: invalid version specification '3.0.0-preview2'
In addition: Warning message:
roxygen2 requires Encoding: UTF-8
Execution halted
[ERROR] Command execution failed.
org.apache.commons.exec.ExecuteException: Process exited with an error: 1 (Exit value: 1)
at org.apache.commons.exec.DefaultExecutor.executeInternal (DefaultExecutor.java:404)
at org.apache.commons.exec.DefaultExecutor.execute (DefaultExecutor.java:166)
at org.codehaus.mojo.exec.ExecMojo.executeCommandLine (ExecMojo.java:804)
at org.codehaus.mojo.exec.ExecMojo.executeCommandLine (ExecMojo.java:751)
at org.codehaus.mojo.exec.ExecMojo.execute (ExecMojo.java:313)
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:137)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:210)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:156)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:148)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:56)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:305)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:192)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:105)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:957)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:289)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:193)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:498)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:282)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:225)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:406)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:347)
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary for Spark Project Parent POM 3.0.0-preview2:
[INFO]
[INFO] Spark Project Parent POM ........................... SUCCESS [ 18.619 s]
[INFO] Spark Project Tags ................................. SUCCESS [ 13.652 s]
[INFO] Spark Project Sketch ............................... SUCCESS [ 5.673 s]
[INFO] Spark Project Local DB ............................. SUCCESS [ 2.081 s]
[INFO] Spark Project Networking ........................... SUCCESS [ 3.509 s]
[INFO] Spark Project Shuffle Streaming Service ............ SUCCESS [ 0.993 s]
[INFO] Spark Project Unsafe ............................... SUCCESS [ 7.556 s]
[INFO] Spark Project Launcher ............................. SUCCESS [ 5.522 s]
[INFO] Spark Project Core ................................. FAILURE [01:06 min]
[INFO] Spark Project ML Local Library ..................... SKIPPED
[INFO] Spark Project GraphX ............................... SKIPPED
[INFO] Spark Project Streaming ............................ SKIPPED
[INFO] Spark Project Catalyst ............................. SKIPPED
[INFO] Spark Project SQL .................................. SKIPPED
[INFO] Spark Project ML Library ........................... SKIPPED
[INFO] Spark Project Tools ................................ SKIPPED
[INFO] Spark Project Hive ................................. SKIPPED
[INFO] Spark Project Graph API ............................ SKIPPED
[INFO] Spark Project Cypher ............................... SKIPPED
[INFO] Spark Project Graph ................................ SKIPPED
[INFO] Spark Project REPL ................................. SKIPPED
[INFO] Spark Project Assembly ............................. SKIPPED
[INFO] Kafka 0.10+ Token Provider for Streaming ........... SKIPPED
[INFO] Spark Integration for Kafka 0.10 ................... SKIPPED
[INFO] Kafka 0.10+ Source for Structured Streaming ........ SKIPPED
[INFO] Spark Project Examples ............................. SKIPPED
[INFO] Spark Integration for Kafka 0.10 Assembly .......... SKIPPED
[INFO] Spark Avro ......................................... SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:04 min
[INFO] Finished at: 2019-12-16T08:02:45Z
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.codehaus.mojo:exec-maven-plugin:1.6.0:exec (sparkr-pkg) on project spark-core_2.12: Command execution failed.: Process exited with an error: 1 (Exit value: 1) -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoExecutionException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR] mvn <args> -rf :spark-core_2.12
```
### Does this PR introduce any user-facing change?
No.
### How was this patch tested?
manual test:
```diff
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index cdb59093781..b648c51e010 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
-336,8 +336,8 sparkR.session <- function(
# Check if version number of SparkSession matches version number of SparkR package
jvmVersion <- callJMethod(sparkSession, "version")
- # Remove -SNAPSHOT from jvm versions
- jvmVersionStrip <- gsub("-SNAPSHOT", "", jvmVersion)
+ # Remove -preview2 from jvm versions
+ jvmVersionStrip <- gsub("-preview2", "", jvmVersion)
rPackageVersion <- paste0(packageVersion("SparkR"))
if (jvmVersionStrip != rPackageVersion) {
```
Closes #26904 from wangyum/SPARK-30265.
Authored-by: Yuming Wang <yumwang@ebay.com>
Signed-off-by: Yuming Wang <wgyumg@gmail.com>
2019-12-16 06:54:12 -05:00
|
|
|
# Write the release version into the R package, docs, and PySpark.
if [[ $RELEASE_VERSION != *"preview"* ]]; then
  # Set the release version in R/pkg/DESCRIPTION
  sed -i".tmp1" 's/Version.*$/Version: '"$RELEASE_VERSION"'/g' R/pkg/DESCRIPTION
else
  # R rejects "-previewN" suffixes in package versions (SPARK-30265), so
  # leave DESCRIPTION untouched and instead make sparkR.R strip the
  # "-previewN" suffix (cut extracts it from RELEASE_VERSION) when
  # comparing the JVM version against the R package version.
  sed -i".tmp1" 's/-SNAPSHOT/'"-$(cut -d "-" -f 2 <<< "$RELEASE_VERSION")"'/g' R/pkg/R/sparkR.R
fi

# Set the release version in docs
sed -i".tmp1" 's/SPARK_VERSION:.*$/SPARK_VERSION: '"$RELEASE_VERSION"'/g' docs/_config.yml
sed -i".tmp2" 's/SPARK_VERSION_SHORT:.*$/SPARK_VERSION_SHORT: '"$RELEASE_VERSION"'/g' docs/_config.yml
sed -i".tmp3" 's/__version__ = .*$/__version__ = "'"$RELEASE_VERSION"'"/' python/pyspark/version.py
|
2016-09-23 17:35:18 -04:00
|
|
|
|
2015-08-12 00:16:48 -04:00
|
|
|
# Commit the release version changes and tag that commit.
git commit -a -m "Preparing Spark release $RELEASE_TAG"
echo "Creating tag $RELEASE_TAG at the head of $GIT_BRANCH"
git tag "$RELEASE_TAG"

# Create next version
$MVN versions:set -DnewVersion="$NEXT_VERSION" | grep -v "no value" # silence logs
# Remove -SNAPSHOT before setting the R version as R expects version
# strings to only have numbers. Parameter expansion (//) removes every
# occurrence, matching the original `sed s/-SNAPSHOT//g` without a fork.
R_NEXT_VERSION=${NEXT_VERSION//-SNAPSHOT/}
sed -i".tmp4" 's/Version.*$/Version: '"$R_NEXT_VERSION"'/g' R/pkg/DESCRIPTION
# Write out the R_NEXT_VERSION to PySpark version info we use dev0 instead of SNAPSHOT to be closer
# to PEP440.
sed -i".tmp5" 's/__version__ = .*$/__version__ = "'"$R_NEXT_VERSION.dev0"'"/' python/pyspark/version.py
|
|
|
|
|
2016-09-23 17:35:18 -04:00
|
|
|
|
|
|
|
# Update docs with next version
sed -i".tmp6" 's/SPARK_VERSION:.*$/SPARK_VERSION: '"$NEXT_VERSION"'/g' docs/_config.yml
# Use R version for short version
sed -i".tmp7" 's/SPARK_VERSION_SHORT:.*$/SPARK_VERSION_SHORT: '"$R_NEXT_VERSION"'/g' docs/_config.yml

# Commit the bump to the next development version on the branch.
git commit -a -m "Preparing development version $NEXT_VERSION"
|
|
|
|
|
2020-06-12 05:50:43 -04:00
|
|
|
if ! is_dry_run; then
|
|
|
|
# Push changes
|
|
|
|
git push origin $RELEASE_TAG
|
|
|
|
if [[ $RELEASE_VERSION != *"preview"* ]]; then
|
|
|
|
git push origin HEAD:$GIT_BRANCH
|
|
|
|
else
|
|
|
|
echo "It's preview release. We only push $RELEASE_TAG to remote."
|
|
|
|
fi
|
|
|
|
|
|
|
|
cd ..
|
|
|
|
rm -rf spark
|
|
|
|
else
|
|
|
|
cd ..
|
2018-06-22 13:38:34 -04:00
|
|
|
mv spark spark.tag
|
|
|
|
echo "Clone with version changes and tag available as spark.tag in the output directory."
|
|
|
|
fi
|