[SPARK-27158][BUILD] dev/mima and dev/scalastyle support dynamic profiles

## What changes were proposed in this pull request?

`dev/mima` and `dev/scalastyle` support dynamic reading profiles from `modules.py`.

## How was this patch tested?

manual tests

Closes #24089 from wangyum/SPARK-27158.

Authored-by: Yuming Wang <yumwang@ebay.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
This commit is contained in:
Yuming Wang 2019-03-15 08:20:42 +09:00 committed by Hyukjin Kwon
parent 8b5224097b
commit f0b6245ea4
5 changed files with 18 additions and 16 deletions

View file

@@ -20,4 +20,4 @@
 SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
 SPARK_ROOT_DIR="$(dirname $SCRIPT_DIR)"
-"$SCRIPT_DIR/scalastyle"
+"$SCRIPT_DIR/scalastyle" "$1"

View file

@@ -24,7 +24,7 @@ set -e
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 cd "$FWDIR"
-SPARK_PROFILES="-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Phive"
+SPARK_PROFILES=${1:-"-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Phive"}
 TOOLS_CLASSPATH="$(build/sbt -DcopyDependencies=false "export tools/fullClasspath" | tail -n1)"
 OLD_DEPS_CLASSPATH="$(build/sbt -DcopyDependencies=false $SPARK_PROFILES "export oldDeps/fullClasspath" | tail -n1)"

View file

@@ -175,9 +175,11 @@ def run_apache_rat_checks():
     run_cmd([os.path.join(SPARK_HOME, "dev", "check-license")])

-def run_scala_style_checks():
+def run_scala_style_checks(build_profiles):
     set_title_and_block("Running Scala style checks", "BLOCK_SCALA_STYLE")
-    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-scala")])
+    profiles = " ".join(build_profiles)
+    print("[info] Checking Scala style using SBT with these profiles: ", profiles)
+    run_cmd([os.path.join(SPARK_HOME, "dev", "lint-scala"), profiles])

 def run_java_style_checks(build_profiles):
@@ -359,7 +361,10 @@ def build_apache_spark(build_tool, hadoop_version):
 def detect_binary_inop_with_mima(hadoop_version):
     build_profiles = get_hadoop_profiles(hadoop_version) + modules.root.build_profile_flags
     set_title_and_block("Detecting binary incompatibilities with MiMa", "BLOCK_MIMA")
-    run_cmd([os.path.join(SPARK_HOME, "dev", "mima")] + build_profiles)
+    profiles = " ".join(build_profiles)
+    print("[info] Detecting binary incompatibilities with MiMa using SBT with these profiles: ",
+          profiles)
+    run_cmd([os.path.join(SPARK_HOME, "dev", "mima"), profiles])

 def run_scala_tests_maven(test_profiles):
@@ -582,7 +587,8 @@ def main():
     if not changed_files or any(f.endswith(".scala")
                                 or f.endswith("scalastyle-config.xml")
                                 for f in changed_files):
-        run_scala_style_checks()
+        build_profiles = get_hadoop_profiles(hadoop_version) + modules.root.build_profile_flags
+        run_scala_style_checks(build_profiles)
     should_run_java_style_checks = False
     if not changed_files or any(f.endswith(".java")
                                 or f.endswith("checkstyle.xml")

View file

@@ -17,12 +17,12 @@
 # limitations under the License.
 #
-profiles=${1:-"-Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver"}
+SPARK_PROFILES=${1:-"-Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver"}

 # NOTE: echo "q" is needed because SBT prompts the user for input on encountering a build file
 # with failure (either resolution or compilation); the "q" makes SBT quit.
 ERRORS=$(echo -e "q\n" \
-    | build/sbt ${profiles} checkstyle test:checkstyle \
+    | build/sbt ${SPARK_PROFILES} checkstyle test:checkstyle \
     | awk '{if($1~/error/)print}' \
 )

View file

@@ -17,17 +17,13 @@
 # limitations under the License.
 #
+SPARK_PROFILES=${1:-"-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Phive"}

 # NOTE: echo "q" is needed because SBT prompts the user for input on encountering a build file
 # with failure (either resolution or compilation); the "q" makes SBT quit.
 ERRORS=$(echo -e "q\n" \
     | build/sbt \
-        -Pkinesis-asl \
-        -Pmesos \
-        -Pkubernetes \
-        -Pyarn \
-        -Phive \
-        -Phive-thriftserver \
-        -Pspark-ganglia-lgpl \
+        ${SPARK_PROFILES} \
         -Pdocker-integration-tests \
         -Pkubernetes-integration-tests \
         scalastyle test:scalastyle \