spark-instrumented-optimizer/dev/audit-release/audit_release.py

#!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Audits binary and maven artifacts for a Spark release.
# Requires GPG and Maven.
# usage:
# python audit_release.py
import os
import re
import shutil
import subprocess
import sys
import time
import urllib2
# Fill in release details here:
RELEASE_URL = "http://people.apache.org/~pwendell/spark-1.0.0-rc1/"
RELEASE_KEY = "9E4FE3AF"
RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
RELEASE_VERSION = "1.0.0"
SCALA_VERSION = "2.10.4"
SCALA_BINARY_VERSION = "2.10"
#
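# Scratch locations and external tools used by the audit: command output is
# captured in LOG_FILE, and release artifacts are downloaded and unpacked
# under WORK_DIR.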
LOG_FILE_NAME = "spark_audit_%s" % time.strftime("%h_%m_%Y_%I_%M_%S")
LOG_FILE = open(LOG_FILE_NAME, 'w')
WORK_DIR = "/tmp/audit_%s" % int(time.time())
MAVEN_CMD = "mvn"
GPG_CMD = "gpg"
print "Starting tests, log output in %s. Test results printed below:" % LOG_FILE_NAME
# Track failures
failures = []
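
# Interactively offer to remove the scratch directory and the log file, either
# at the end of the audit or after a fatal command failure.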
def clean_work_files():
    print "OK to delete scratch directory '%s'? (y/N): " % WORK_DIR
    response = raw_input()
    if response == "y":
        shutil.rmtree(WORK_DIR)
    print "Should I delete the log output file '%s'? (y/N): " % LOG_FILE_NAME
    response = raw_input()
    if response == "y":
        os.unlink(LOG_FILE_NAME)
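
# Run a shell command, sending its output to the log file. By default a
# non-zero exit status aborts the audit after offering to clean up.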
def run_cmd(cmd, exit_on_failure=True):
    print >> LOG_FILE, "Running command: %s" % cmd
    ret = subprocess.call(cmd, shell=True, stdout=LOG_FILE, stderr=LOG_FILE)
    if ret != 0 and exit_on_failure:
        print "Command failed: %s" % cmd
        clean_work_files()
        sys.exit(-1)
    return ret
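
# Like run_cmd, but return the command's stdout (used below to capture GPG
# checksum output).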
def run_cmd_with_output(cmd):
    print >> sys.stderr, "Running command: %s" % cmd
    return subprocess.check_output(cmd, shell=True, stderr=LOG_FILE)
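
# Minimal pass/fail reporting helpers; failures are collected so they can be
# listed in the summary printed at the end of the run.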
def test(bool, str):
    if bool:
        return passed(str)
    failed(str)

def passed(str):
    print "[PASSED] %s" % str

def failed(str):
    failures.append(str)
    print "[**FAILED**] %s" % str
def get_url(url):
    return urllib2.urlopen(url).read()
original_dir = os.getcwd()
# For each of these modules, we'll test an 'empty' application in sbt and
# maven that links against them. This will catch issues with messed up
# dependencies within those projects.
modules = [
"spark-core", "spark-bagel", "spark-mllib", "spark-streaming", "spark-repl",
"spark-graphx", "spark-streaming-flume", "spark-streaming-kafka",
"spark-streaming-mqtt", "spark-streaming-twitter", "spark-streaming-zeromq",
"spark-catalyst", "spark-sql", "spark-hive"
]
modules = map(lambda m: "%s_%s" % (m, SCALA_BINARY_VERSION), modules)
# Check for directories that might interfere with tests
local_ivy_spark = "~/.ivy2/local/org.apache.spark"
cache_ivy_spark = "~/.ivy2/cache/org.apache.spark"
local_maven_kafka = "~/.m2/repository/org/apache/kafka"
local_maven_spark = "~/.m2/repository/org/apache/spark"

def ensure_path_not_present(x):
    if os.path.exists(os.path.expanduser(x)):
        print "Please remove %s, it can interfere with testing published artifacts." % x
        sys.exit(-1)

map(ensure_path_not_present,
    [local_ivy_spark, cache_ivy_spark, local_maven_kafka, local_maven_spark])
# SBT build tests
os.chdir("blank_sbt_build")
os.environ["SPARK_VERSION"] = RELEASE_VERSION
os.environ["SCALA_VERSION"] = SCALA_VERSION
os.environ["SPARK_RELEASE_REPOSITORY"] = RELEASE_REPOSITORY
os.environ["SPARK_AUDIT_MASTER"] = "local"
for module in modules:
os.environ["SPARK_MODULE"] = module
ret = run_cmd("sbt clean update", exit_on_failure=False)
test(ret == 0, "sbt build against '%s' module" % module)
os.chdir(original_dir)
# SBT application tests
for app in ["sbt_app_core", "sbt_app_graphx", "sbt_app_streaming", "sbt_app_sql", "sbt_app_hive"]:
    os.chdir(app)
    ret = run_cmd("sbt clean run", exit_on_failure=False)
    test(ret == 0, "sbt application (%s)" % app)
    os.chdir(original_dir)
# Maven build tests
os.chdir("blank_maven_build")
for module in modules:
    cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
           '-Dspark.module="%s" clean compile' %
           (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, module))
    ret = run_cmd(cmd, exit_on_failure=False)
    test(ret == 0, "maven build against '%s' module" % module)
os.chdir(original_dir)
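
# Maven application test: compile the SimpleApp example in maven_app_core and
# run it via exec:java against the staged artifacts.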
os.chdir("maven_app_core")
mvn_exec_cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
                '-Dscala.binary.version="%s" clean compile '
                'exec:java -Dexec.mainClass="SimpleApp"' %
                (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, SCALA_BINARY_VERSION))
ret = run_cmd(mvn_exec_cmd, exit_on_failure=False)
test(ret == 0, "maven application (core)")
os.chdir(original_dir)
# Binary artifact tests
if os.path.exists(WORK_DIR):
print "Working directory '%s' already exists" % WORK_DIR
sys.exit(-1)
os.mkdir(WORK_DIR)
os.chdir(WORK_DIR)
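
# Scrape the staging index page for links to .tgz artifacts, then download and
# verify each one.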
index_page = get_url(RELEASE_URL)
artifact_regex = re.compile("<a href=\"(.*\.tgz)\">")
artifacts = artifact_regex.findall(index_page)
for artifact in artifacts:
print "==== Verifying download integrity for artifact: %s ====" % artifact
artifact_url = "%s/%s" % (RELEASE_URL, artifact)
run_cmd("wget %s" % artifact_url)
key_file = "%s.asc" % artifact
run_cmd("wget %s/%s" % (RELEASE_URL, key_file))
run_cmd("wget %s%s" % (artifact_url, ".sha"))
# Verify signature
run_cmd("%s --keyserver pgp.mit.edu --recv-key %s" % (GPG_CMD, RELEASE_KEY))
run_cmd("%s %s" % (GPG_CMD, key_file))
passed("Artifact signature verified.")
# Verify md5
my_md5 = run_cmd_with_output("%s --print-md MD5 %s" % (GPG_CMD, artifact)).strip()
release_md5 = get_url("%s.md5" % artifact_url).strip()
test(my_md5 == release_md5, "Artifact MD5 verified.")
# Verify sha
my_sha = run_cmd_with_output("%s --print-md SHA512 %s" % (GPG_CMD, artifact)).strip()
release_sha = get_url("%s.sha" % artifact_url).strip()
test(my_sha == release_sha, "Artifact SHA verified.")
# Verify Apache required files
dir_name = artifact.replace(".tgz", "")
run_cmd("tar xvzf %s" % artifact)
base_files = os.listdir(dir_name)
test("CHANGES.txt" in base_files, "Tarball contains CHANGES.txt file")
test("NOTICE" in base_files, "Tarball contains NOTICE file")
test("LICENSE" in base_files, "Tarball contains LICENSE file")
os.chdir(WORK_DIR)
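
# For each unpacked artifact, verify that it builds with both sbt and Maven
# and that the Maven unit tests pass.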
for artifact in artifacts:
print "==== Verifying build and tests for artifact: %s ====" % artifact
os.chdir(os.path.join(WORK_DIR, dir_name))
os.environ["MAVEN_OPTS"] = "-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"
# Verify build
print "==> Running build"
run_cmd("sbt assembly")
passed("sbt build successful")
run_cmd("%s package -DskipTests" % MAVEN_CMD)
passed("Maven build successful")
# Verify tests
print "==> Performing unit tests"
run_cmd("%s test" % MAVEN_CMD)
passed("Tests successful")
os.chdir(WORK_DIR)
clean_work_files()
if len(failures) == 0:
    print "ALL TESTS PASSED"
else:
    print "SOME TESTS DID NOT PASS"
    for f in failures:
        print f
os.chdir(original_dir)