f405de87c8
## What changes were proposed in this pull request? Currently, `build/mvn` provides a convenient option, `--force`, in order to use the recommended version of maven without changing PATH environment variable. However, there were two problems. - `dev/lint-java` does not use the newly installed maven. ```bash $ ./build/mvn --force clean $ ./dev/lint-java Using `mvn` from path: /usr/local/bin/mvn ``` - It's not easy to type `--force` option always. If '--force' option is used once, we had better prefer the installed maven recommended by Spark. This PR makes `build/mvn` check the existence of maven installed by `--force` option first. According to the comments, this PR aims to the followings: - Detect the maven version from `pom.xml`. - Install maven if there is no or old maven. - Remove `--force` option. ## How was this patch tested? Manual. ```bash $ ./build/mvn --force clean $ ./dev/lint-java Using `mvn` from path: /Users/dongjoon/spark/build/apache-maven-3.3.9/bin/mvn ... $ rm -rf ./build/apache-maven-3.3.9/ $ ./dev/lint-java Using `mvn` from path: /usr/local/bin/mvn ``` Author: Dongjoon Hyun <dongjoon@apache.org> Closes #12631 from dongjoon-hyun/SPARK-14867.
113 lines
3.7 KiB
Bash
Executable file
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e

# Resolve the Spark project root (the parent of this script's directory) and
# work from there, so every relative path below (build/mvn, dev/pr-deps, ...)
# resolves consistently regardless of the caller's cwd.
# Quoted $(dirname "$0") instead of backticks/unquoted $0 so paths containing
# spaces do not break the resolution.
FWDIR="$(cd "$(dirname "$0")"/..; pwd)"
cd "$FWDIR"

# Explicitly set locale in order to make `sort` output consistent across machines.
# See https://stackoverflow.com/questions/28881 for more details.
export LC_ALL=C

# TODO: This would be much nicer to do in SBT, once SBT supports Maven-style resolution.

# NOTE: These should match those in the release publishing script
HADOOP2_MODULE_PROFILES="-Phive-thriftserver -Pyarn -Phive"
MVN="build/mvn"
HADOOP_PROFILES=(
  hadoop-2.2
  hadoop-2.3
  hadoop-2.4
  hadoop-2.6
  hadoop-2.7
)
# We'll switch the version to a temp. one, publish POMs using that new version, then switch back to
# the old version. We need to do this because the `dependency:build-classpath` task needs to
# resolve Spark's internal submodule dependencies.

# Ask Maven for the current project version (trick from http://stackoverflow.com/a/26514030).
# Running the command inside an `if !` condition means a failure is handled
# here instead of being turned into an immediate exit by `set -e`.
if ! OLD_VERSION=$($MVN -q \
    -Dexec.executable="echo" \
    -Dexec.args='${project.version}' \
    --non-recursive \
    org.codehaus.mojo:exec-maven-plugin:1.3.1:exec); then
  echo -e "Error while getting version string from Maven:\n$OLD_VERSION"
  exit 1
fi

# A random six-digit suffix makes collisions with any real version (or a
# concurrent run's temp version) in the local repo extremely unlikely.
TEMP_VERSION="spark-$(python -S -c "import random; print(random.randrange(100000, 999999))")"
# Restore the repository to its pre-script state. Registered as an EXIT trap
# below, so it runs on success and on failure alike.
# Globals read: HOME, TEMP_VERSION, MVN, OLD_VERSION
function reset_version {
  # Delete the temporary POMs that we wrote to the local Maven repo.
  # `-prune -exec rm -rf {} +` removes each matching entry (and its contents)
  # and, unlike the previous `find | grep | xargs rm -rf`, runs nothing at all
  # when there are no matches — an empty match list used to make GNU `rm` fail
  # with "missing operand", which under `set -e` aborted this trap before the
  # version below was restored.
  find "$HOME/.m2/" -name "*$TEMP_VERSION*" -prune -exec rm -rf {} +

  # Restore the original version number:
  $MVN -q versions:set -DnewVersion="$OLD_VERSION" -DgenerateBackupPoms=false > /dev/null
}
# Ensure cleanup (temp POM removal + version restore) happens on every exit
# path, including failures in the Maven invocations below.
trap reset_version EXIT

# Switch the whole build to the temporary version; quoted to be safe even
# though TEMP_VERSION is always of the form spark-NNNNNN.
$MVN -q versions:set -DnewVersion="$TEMP_VERSION" -DgenerateBackupPoms=false > /dev/null
# Generate manifests for each Hadoop profile:
for HADOOP_PROFILE in "${HADOOP_PROFILES[@]}"; do
  # Publish every submodule jar (main + test) under the temporary version so
  # the assembly module below can resolve Spark's internal dependencies.
  echo "Performing Maven install for $HADOOP_PROFILE"
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE jar:jar jar:test-jar install:install clean -q

  echo "Performing Maven validate for $HADOOP_PROFILE"
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE validate -q

  # Build the assembly classpath and normalize it into a sorted list of bare
  # jar file names, excluding Spark's own artifacts:
  #   grep -A 5   : the classpath line follows the "Building Spark Project
  #                 Assembly" banner (NOTE(review): assumes it appears within
  #                 5 lines of the banner — confirm against Maven's output)
  #   tail -n 1   : keep only the classpath line itself
  #   tr ":" "\n" : split the colon-separated classpath, one entry per line
  #   rev|cut|rev : strip the directory part, keeping each jar's basename
  #   grep -v     : drop Spark's own jars from the manifest
  echo "Generating dependency manifest for $HADOOP_PROFILE"
  mkdir -p dev/pr-deps
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE dependency:build-classpath -pl assembly \
    | grep "Building Spark Project Assembly" -A 5 \
    | tail -n 1 | tr ":" "\n" | rev | cut -d "/" -f 1 | rev | sort \
    | grep -v spark > dev/pr-deps/spark-deps-$HADOOP_PROFILE
done
# When invoked with --replace-manifest, promote the freshly generated
# manifests to be the new committed reference files and stop here.
# ("$*" joins the script arguments with spaces, matching the original
# unquoted $@ behavior in [[ ]] string context; * substring match.)
if [[ "$*" == *replace-manifest* ]]; then
  echo "Replacing manifests and creating new files at dev/deps"
  rm -rf dev/deps
  mv dev/pr-deps dev/deps
  exit 0
fi
# Fail if any freshly generated manifest disagrees with the checked-in copy.
for HADOOP_PROFILE in "${HADOOP_PROFILES[@]}"; do
  # `git diff` exits non-zero when the files differ, so suspend -e around it
  # and inspect the captured output rather than the exit status.
  set +e
  diff_output="$(git diff --no-index \
    dev/deps/spark-deps-$HADOOP_PROFILE \
    dev/pr-deps/spark-deps-$HADOOP_PROFILE)"
  set -e
  if [[ -n "$diff_output" ]]; then
    echo "Spark's published dependencies DO NOT MATCH the manifest file (dev/spark-deps)."
    echo "To update the manifest file, run './dev/test-dependencies.sh --replace-manifest'."
    echo "$diff_output"
    rm -rf dev/pr-deps
    exit 1
  fi
done

exit 0