2014-06-03 21:53:13 -04:00
|
|
|
#!/usr/bin/env python
|
2014-02-09 02:13:34 -05:00
|
|
|
|
|
|
|
#
|
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
|
|
# this work for additional information regarding copyright ownership.
|
|
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
|
|
# (the "License"); you may not use this file except in compliance with
|
|
|
|
# the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
#
|
|
|
|
|
2017-07-30 21:07:33 -04:00
|
|
|
# Utility for creating well-formed pull request merges and pushing them to Apache
|
|
|
|
# Spark.
|
|
|
|
# usage: ./merge_spark_pr.py (see config env vars below)
|
2014-02-09 02:13:34 -05:00
|
|
|
#
|
2017-07-30 21:07:33 -04:00
|
|
|
# This utility assumes you already have a local Spark git folder and that you
|
2014-05-27 00:40:52 -04:00
|
|
|
# have added remotes corresponding to both (i) the github apache Spark
|
2014-02-09 02:13:34 -05:00
|
|
|
# mirror and (ii) the apache git repo.
|
|
|
|
|
|
|
|
import json
|
|
|
|
import os
|
2014-04-27 18:41:57 -04:00
|
|
|
import re
|
2014-02-09 02:13:34 -05:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2018-01-16 19:25:10 -05:00
|
|
|
import traceback
|
2014-02-09 02:13:34 -05:00
|
|
|
import urllib2
|
|
|
|
|
2014-04-27 18:41:57 -04:00
|
|
|
# jira-python is an optional dependency: JIRA-resolution features are only
# offered when the package is importable, as recorded in JIRA_IMPORTED.
try:
    import jira.client
    JIRA_IMPORTED = True
except ImportError:
    JIRA_IMPORTED = False
|
2014-04-27 18:41:57 -04:00
|
|
|
|
2018-07-03 21:40:58 -04:00
|
|
|
# Python 2/3 compatibility: on Python 2, alias input() to raw_input() so the
# interactive prompts throughout this script behave the same under either
# interpreter version.
if sys.version < '3':
    input = raw_input  # noqa
|
2018-07-03 21:40:58 -04:00
|
|
|
|
2014-02-09 02:13:34 -05:00
|
|
|
# Location of your Spark git development area
SPARK_HOME = os.environ.get("SPARK_HOME", os.getcwd())
# Remote name which points to the GitHub site
PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache-github")
# Remote name which points to Apache git
PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache")
# ASF JIRA username
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", "")
# ASF JIRA password
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", "")
# OAuth key used for issuing requests against the GitHub API. If this is not defined, then requests
# will be unauthenticated. You should only need to configure this if you find yourself regularly
# exceeding your IP's unauthenticated request rate limit. You can create an OAuth key at
# https://github.com/settings/tokens. This script only requires the "public_repo" scope.
GITHUB_OAUTH_KEY = os.environ.get("GITHUB_OAUTH_KEY")

# Endpoints used for GitHub API calls and JIRA links/REST calls below.
GITHUB_BASE = "https://github.com/apache/spark/pull"
GITHUB_API_BASE = "https://api.github.com/repos/apache/spark"
JIRA_BASE = "https://issues.apache.org/jira/browse"
JIRA_API_BASE = "https://issues.apache.org/jira"
# Prefix added to temporary branches
BRANCH_PREFIX = "PR_TOOL"
|
|
|
|
|
2014-05-27 00:40:52 -04:00
|
|
|
|
2014-02-09 02:13:34 -05:00
|
|
|
def get_json(url):
    """Fetch *url* from the GitHub API and return the parsed JSON payload.

    Sends an OAuth token header when GITHUB_OAUTH_KEY is configured. On an
    HTTP error, prints a diagnostic (with a rate-limit hint when the
    X-RateLimit-Remaining header says the quota is exhausted) and exits.
    """
    try:
        request = urllib2.Request(url)
        if GITHUB_OAUTH_KEY:
            request.add_header('Authorization', 'token %s' % GITHUB_OAUTH_KEY)
        return json.load(urllib2.urlopen(request))
    except urllib2.HTTPError as e:
        rate_limited = ("X-RateLimit-Remaining" in e.headers and
                        e.headers["X-RateLimit-Remaining"] == '0')
        if rate_limited:
            print("Exceeded the GitHub API rate limit; see the instructions in " +
                  "dev/merge_spark_pr.py to configure an OAuth token for making authenticated " +
                  "GitHub requests.")
        else:
            print("Unable to fetch URL, exiting: %s" % url)
        sys.exit(-1)
|
|
|
|
|
2014-02-09 02:13:34 -05:00
|
|
|
|
|
|
|
def fail(msg):
    """Print *msg*, remove this tool's temporary branches, and exit non-zero."""
    print(msg)
    clean_up()
    sys.exit(-1)
|
|
|
|
|
2014-02-09 02:13:34 -05:00
|
|
|
|
|
|
|
def run_cmd(cmd):
    """Echo *cmd*, execute it, and return the command's captured stdout.

    *cmd* may be either an argv list or a single whitespace-delimited string.
    """
    print(cmd)
    argv = cmd if isinstance(cmd, list) else cmd.split(" ")
    return subprocess.check_output(argv)
|
|
|
|
|
2014-02-09 02:13:34 -05:00
|
|
|
|
|
|
|
def continue_maybe(prompt):
    """Ask the user a yes/no question; abort the whole script unless 'y'."""
    answer = input("\n%s (y/n): " % prompt)
    if answer.lower() != "y":
        fail("Okay, exiting")
|
|
|
|
|
2017-01-02 10:23:19 -05:00
|
|
|
|
2014-02-09 02:13:34 -05:00
|
|
|
def clean_up():
    """Restore the original checkout (when known) and delete temp branches.

    'original_head' only exists as a global once the main flow has recorded
    it, so the lookup is guarded to keep clean_up() safe from early failures.
    """
    if 'original_head' in globals():
        print("Restoring head pointer to %s" % original_head)
        run_cmd("git checkout %s" % original_head)

    branches = run_cmd("git branch").replace(" ", "").split("\n")
    tool_branches = [b for b in branches if b.startswith(BRANCH_PREFIX)]

    for branch in tool_branches:
        print("Deleting local branch %s" % branch)
        run_cmd("git branch -D %s" % branch)
|
2014-02-09 02:13:34 -05:00
|
|
|
|
|
|
|
|
|
|
|
# merge the requested PR and return the merge hash
|
2015-04-21 21:08:29 -04:00
|
|
|
def merge_pr(pr_num, target_ref, title, body, pr_repo_desc):
    """Squash-merge pull request *pr_num* into *target_ref* and push it.

    Fetches the PR head and the target branch into temporary local branches,
    squash-merges the PR (offering manual conflict resolution on failure),
    builds a commit message from *title*, *body*, a conflict note, the
    "Closes #N" line GitHub needs to auto-close the PR, and author/sign-off
    trailers, then pushes after confirmation. Returns the abbreviated merge
    hash.
    """
    pr_branch_name = "%s_MERGE_PR_%s" % (BRANCH_PREFIX, pr_num)
    target_branch_name = "%s_MERGE_PR_%s_%s" % (BRANCH_PREFIX, pr_num, target_ref.upper())
    run_cmd("git fetch %s pull/%s/head:%s" % (PR_REMOTE_NAME, pr_num, pr_branch_name))
    run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, target_ref, target_branch_name))
    run_cmd("git checkout %s" % target_branch_name)

    had_conflicts = False
    try:
        run_cmd(['git', 'merge', pr_branch_name, '--squash'])
    except Exception as e:
        # Let the user resolve conflicts by hand instead of aborting outright.
        msg = "Error merging: %s\nWould you like to manually fix-up this merge?" % e
        continue_maybe(msg)
        msg = "Okay, please fix any conflicts and 'git add' conflicting files... Finished?"
        continue_maybe(msg)
        had_conflicts = True

    # Distinct commit authors, ordered by commit count (most commits first).
    commit_authors = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
                             '--pretty=format:%an <%ae>']).split("\n")
    distinct_authors = sorted(set(commit_authors),
                              key=lambda x: commit_authors.count(x), reverse=True)
    primary_author = input(
        "Enter primary author in the format of \"name <email>\" [%s]: " %
        distinct_authors[0])
    if primary_author == "":
        primary_author = distinct_authors[0]
    else:
        # When primary author is specified manually, de-dup it from author list and
        # put it at the head of author list.
        distinct_authors = list(filter(lambda x: x != primary_author, distinct_authors))
        distinct_authors.insert(0, primary_author)

    # NOTE(review): 'commits' is never read below — confirm before removing.
    commits = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
                      '--pretty=format:%h [%an] %s']).split("\n\n")

    # Each message paragraph becomes a separate "-m" argument to git commit.
    merge_message_flags = []

    merge_message_flags += ["-m", title]
    if body is not None:
        # We remove @ symbols from the body to avoid triggering e-mails
        # to people every time someone creates a public fork of Spark.
        merge_message_flags += ["-m", body.replace("@", "")]

    committer_name = run_cmd("git config --get user.name").strip()
    committer_email = run_cmd("git config --get user.email").strip()

    if had_conflicts:
        message = "This patch had conflicts when merged, resolved by\nCommitter: %s <%s>" % (
            committer_name, committer_email)
        merge_message_flags += ["-m", message]

    # The string "Closes #%s" string is required for GitHub to correctly close the PR
    merge_message_flags += ["-m", "Closes #%s from %s." % (pr_num, pr_repo_desc)]

    # Build the Authored-by / Lead-authored-by / Co-authored-by trailer block.
    authors = "Authored-by:" if len(distinct_authors) == 1 else "Lead-authored-by:"
    authors += " %s" % (distinct_authors.pop(0))
    if len(distinct_authors) > 0:
        authors += "\n" + "\n".join(["Co-authored-by: %s" % a for a in distinct_authors])
    authors += "\n" + "Signed-off-by: %s <%s>" % (committer_name, committer_email)

    merge_message_flags += ["-m", authors]

    run_cmd(['git', 'commit', '--author="%s"' % primary_author] + merge_message_flags)

    continue_maybe("Merge complete (local ref %s). Push to %s?" % (
        target_branch_name, PUSH_REMOTE_NAME))

    try:
        run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, target_branch_name, target_ref))
    except Exception as e:
        clean_up()
        fail("Exception while pushing: %s" % e)

    # Abbreviate to the first 8 characters of the full hash.
    merge_hash = run_cmd("git rev-parse %s" % target_branch_name)[:8]
    clean_up()
    print("Pull request #%s merged!" % pr_num)
    print("Merge hash: %s" % merge_hash)
    return merge_hash
|
2014-02-13 02:23:06 -05:00
|
|
|
|
2014-04-07 00:04:45 -04:00
|
|
|
|
2014-05-27 00:40:52 -04:00
|
|
|
def cherry_pick(pr_num, merge_hash, default_branch):
    """Cherry-pick *merge_hash* onto a maintenance branch and push it.

    Prompts for the target branch (defaulting to *default_branch*), applies
    the commit with `git cherry-pick -sx` on a temporary local branch
    (offering manual conflict resolution on failure), and pushes after
    confirmation. Returns the branch name that was picked into.
    """
    pick_ref = input("Enter a branch name [%s]: " % default_branch)
    if pick_ref == "":
        pick_ref = default_branch

    pick_branch_name = "%s_PICK_PR_%s_%s" % (BRANCH_PREFIX, pr_num, pick_ref.upper())

    run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, pick_ref, pick_branch_name))
    run_cmd("git checkout %s" % pick_branch_name)

    try:
        # -s adds a Signed-off-by trailer, -x records the original hash.
        run_cmd("git cherry-pick -sx %s" % merge_hash)
    except Exception as e:
        # Let the user resolve conflicts by hand instead of aborting outright.
        msg = "Error cherry-picking: %s\nWould you like to manually fix-up this merge?" % e
        continue_maybe(msg)
        msg = "Okay, please fix any conflicts and finish the cherry-pick. Finished?"
        continue_maybe(msg)

    continue_maybe("Pick complete (local ref %s). Push to %s?" % (
        pick_branch_name, PUSH_REMOTE_NAME))

    try:
        run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, pick_branch_name, pick_ref))
    except Exception as e:
        clean_up()
        fail("Exception while pushing: %s" % e)

    # Abbreviate to the first 8 characters of the full hash.
    pick_hash = run_cmd("git rev-parse %s" % pick_branch_name)[:8]
    clean_up()

    print("Pull request #%s picked into %s!" % (pr_num, pick_ref))
    print("Pick hash: %s" % pick_hash)
    return pick_ref
|
2014-02-09 02:13:34 -05:00
|
|
|
|
2014-04-27 18:41:57 -04:00
|
|
|
|
|
|
|
def fix_version_from_branch(branch, versions):
    """Pick the default JIRA fix version for a merge into *branch*.

    Note: Assumes *versions* is a sorted (newest->oldest) list of un-released
    versions. "master" maps to the newest version; "branch-X.Y" maps to the
    oldest unreleased version whose name starts with "X.Y".
    """
    if branch == "master":
        return versions[0]
    else:
        branch_ver = branch.replace("branch-", "")
        # Wrap in list() so that [-1] works on Python 3, where filter()
        # returns a lazy iterator instead of a list.
        return list(filter(lambda x: x.name.startswith(branch_ver), versions))[-1]
|
|
|
|
|
2014-04-27 18:41:57 -04:00
|
|
|
|
2014-11-29 23:12:10 -05:00
|
|
|
def resolve_jira_issue(merge_branches, comment, default_jira_id=""):
    """Mark one JIRA issue as Resolved/Fixed after a merge.

    Prompts for the JIRA id (defaulting to *default_jira_id*) and for the
    comma-separated fix version(s), defaulting to one version per branch in
    *merge_branches*, then performs the "Resolve Issue" transition attaching
    *comment*. Exits via fail() when the issue is missing or already closed.
    """
    asf_jira = jira.client.JIRA({'server': JIRA_API_BASE},
                                basic_auth=(JIRA_USERNAME, JIRA_PASSWORD))

    jira_id = input("Enter a JIRA id [%s]: " % default_jira_id)
    if jira_id == "":
        jira_id = default_jira_id

    try:
        issue = asf_jira.issue(jira_id)
    except Exception as e:
        fail("ASF JIRA could not find %s\n%s" % (jira_id, e))

    cur_status = issue.fields.status.name
    cur_summary = issue.fields.summary
    cur_assignee = issue.fields.assignee
    if cur_assignee is None:
        cur_assignee = choose_jira_assignee(issue, asf_jira)
    # Check again, we might not have chosen an assignee
    if cur_assignee is None:
        cur_assignee = "NOT ASSIGNED!!!"
    else:
        cur_assignee = cur_assignee.displayName

    if cur_status == "Resolved" or cur_status == "Closed":
        fail("JIRA issue %s already has status '%s'" % (jira_id, cur_status))
    print("=== JIRA %s ===" % jira_id)
    print("summary\t\t%s\nassignee\t%s\nstatus\t\t%s\nurl\t\t%s/%s\n" %
          (cur_summary, cur_assignee, cur_status, JIRA_BASE, jira_id))

    # list() wrappers below keep filter()/map() results subscriptable and
    # reusable on Python 3, where they would otherwise be one-shot iterators.
    versions = asf_jira.project_versions("SPARK")
    versions = sorted(versions, key=lambda x: x.name, reverse=True)
    versions = list(filter(lambda x: x.raw['released'] is False, versions))
    # Consider only x.y.z versions
    versions = list(filter(lambda x: re.match(r'\d+\.\d+\.\d+', x.name), versions))

    default_fix_versions = list(map(lambda x: fix_version_from_branch(x, versions).name,
                                    merge_branches))
    for v in default_fix_versions:
        # Handles the case where we have forked a release branch but not yet made the release.
        # In this case, if the PR is committed to the master branch and the release branch, we
        # only consider the release branch to be the fix version. E.g. it is not valid to have
        # both 1.1.0 and 1.0.0 as fix versions.
        (major, minor, patch) = v.split(".")
        if patch == "0":
            previous = "%s.%s.%s" % (major, int(minor) - 1, 0)
            if previous in default_fix_versions:
                default_fix_versions = list(filter(lambda x: x != v, default_fix_versions))
    default_fix_versions = ",".join(default_fix_versions)

    fix_versions = input("Enter comma-separated fix version(s) [%s]: " % default_fix_versions)
    if fix_versions == "":
        fix_versions = default_fix_versions
    fix_versions = fix_versions.replace(" ", "").split(",")

    def get_version_json(version_str):
        # Raises IndexError when the user typed a version that does not exist.
        return list(filter(lambda v: v.name == version_str, versions))[0].raw

    jira_fix_versions = list(map(lambda v: get_version_json(v), fix_versions))

    resolve = list(filter(lambda a: a['name'] == "Resolve Issue",
                          asf_jira.transitions(jira_id)))[0]
    resolution = list(filter(lambda r: r.raw['name'] == "Fixed", asf_jira.resolutions()))[0]
    asf_jira.transition_issue(
        jira_id, resolve["id"], fixVersions=jira_fix_versions,
        comment=comment, resolution={'id': resolution.raw['id']})

    print("Successfully resolved %s with fixVersions=%s!" % (jira_id, fix_versions))
|
2014-05-27 00:40:52 -04:00
|
|
|
|
2014-04-27 18:41:57 -04:00
|
|
|
|
2017-12-29 08:30:49 -05:00
|
|
|
def choose_jira_assignee(issue, asf_jira):
    """
    Prompt the user to choose who to assign the issue to in jira, given a list of candidates,
    including the original reporter and all commentors
    """
    while True:
        try:
            reporter = issue.fields.reporter
            # Materialize as a list: 'commentors' is both consumed by set()
            # and membership-tested below, which a lazy map() on Python 3
            # would not survive (the 'in' test would always be False).
            commentors = list(map(lambda x: x.author, issue.fields.comment.comments))
            candidates = set(commentors)
            candidates.add(reporter)
            candidates = list(candidates)
            print("JIRA is unassigned, choose assignee")
            for idx, author in enumerate(candidates):
                # The Spark QA bot account is never a sensible assignee.
                if author.key == "apachespark":
                    continue
                annotations = ["Reporter"] if author == reporter else []
                if author in commentors:
                    annotations.append("Commentor")
                print("[%d] %s (%s)" % (idx, author.displayName, ",".join(annotations)))
            raw_assignee = input(
                "Enter number of user, or userid, to assign to (blank to leave unassigned):")
            if raw_assignee == "":
                return None
            else:
                try:
                    # 'position' avoids shadowing the builtin id().
                    position = int(raw_assignee)
                    assignee = candidates[position]
                except (ValueError, IndexError):
                    # assume it's a user id, and try to assign (might fail, we just prompt again)
                    assignee = asf_jira.user(raw_assignee)
                asf_jira.assign_issue(issue.key, assignee.key)
                return assignee
        except KeyboardInterrupt:
            # Let Ctrl-C abort the whole script instead of looping forever.
            raise
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are no longer swallowed by the retry loop.
            traceback.print_exc()
            print("Error assigning JIRA, try again (or leave blank and fix manually)")
|
2017-12-29 08:30:49 -05:00
|
|
|
|
|
|
|
|
2014-11-29 23:12:10 -05:00
|
|
|
def resolve_jira_issues(title, merge_branches, comment):
    """Resolve every SPARK-XXXXX id found in *title*; prompt for one if none."""
    jira_ids = re.findall("SPARK-[0-9]{4,5}", title)

    if not jira_ids:
        resolve_jira_issue(merge_branches, comment)
    for jira_id in jira_ids:
        resolve_jira_issue(merge_branches, comment, jira_id)
|
|
|
|
|
|
|
|
|
2015-04-21 21:08:29 -04:00
|
|
|
def standardize_jira_ref(text):
    """
    Standardize the [SPARK-XXXXX] [MODULE] prefix
    Converts "[SPARK-XXX][mllib] Issue", "[MLLib] SPARK-XXX. Issue" or "SPARK XXX [MLLIB]: Issue" to
    "[SPARK-XXX][MLLIB] Issue"

    >>> standardize_jira_ref(
    ...     "[SPARK-5821] [SQL] ParquetRelation2 CTAS should check if delete is successful")
    '[SPARK-5821][SQL] ParquetRelation2 CTAS should check if delete is successful'
    >>> standardize_jira_ref(
    ...     "[SPARK-4123][Project Infra][WIP]: Show new dependencies added in pull requests")
    '[SPARK-4123][PROJECT INFRA][WIP] Show new dependencies added in pull requests'
    >>> standardize_jira_ref("[MLlib] Spark 5954: Top by key")
    '[SPARK-5954][MLLIB] Top by key'
    >>> standardize_jira_ref("[SPARK-979] a LRU scheduler for load balancing in TaskSchedulerImpl")
    '[SPARK-979] a LRU scheduler for load balancing in TaskSchedulerImpl'
    >>> standardize_jira_ref(
    ...     "SPARK-1094 Support MiMa for reporting binary compatibility across versions.")
    '[SPARK-1094] Support MiMa for reporting binary compatibility across versions.'
    >>> standardize_jira_ref("[WIP] [SPARK-1146] Vagrant support for Spark")
    '[SPARK-1146][WIP] Vagrant support for Spark'
    >>> standardize_jira_ref(
    ...     "SPARK-1032. If Yarn app fails before registering, app master stays aroun...")
    '[SPARK-1032] If Yarn app fails before registering, app master stays aroun...'
    >>> standardize_jira_ref(
    ...     "[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.")
    '[SPARK-6250][SPARK-6146][SPARK-5911][SQL] Types are now reserved words in DDL parser.'
    >>> standardize_jira_ref("Additional information for users building from source code")
    'Additional information for users building from source code'
    """
    # Titles that already match the canonical form are returned untouched.
    if re.search(r'^\[SPARK-[0-9]{3,6}\](\[[A-Z0-9_\s,]+\] )+\S+', text):
        return text

    ticket_tags = []
    module_tags = []

    # Pull every JIRA reference out of the title (any of "SPARK-123",
    # "SPARK 123", "spark-123", ...), normalize it to "[SPARK-123]", and
    # strip the raw reference from the working text.
    ref_pattern = re.compile(r'(SPARK[-\s]*[0-9]{3,6})+', re.IGNORECASE)
    for raw_ref in ref_pattern.findall(text):
        ticket_tags.append('[' + re.sub(r'\s+', '-', raw_ref.upper()) + ']')
        text = text.replace(raw_ref, '')

    # Pull out bracketed component tags (alphanumerics, spaces, dashes,
    # periods, commas), upper-cased, and strip them from the working text.
    comp_pattern = re.compile(r'(\[[\w\s,.-]+\])', re.IGNORECASE)
    for raw_tag in comp_pattern.findall(text):
        module_tags.append(raw_tag.upper())
        text = text.replace(raw_tag, '')

    # Drop any leading punctuation/whitespace left behind by the removals.
    leftover = re.search(r'^\W+(.*)', text, re.IGNORECASE)
    if leftover is not None:
        text = leftover.group(1)

    # Reassemble: JIRA refs first, then components, then the cleaned title,
    # collapsing any runs of whitespace to a single space.
    assembled = ''.join(ticket_tags).strip() + ''.join(module_tags).strip() + " " + text.strip()
    return re.sub(r'\s+', ' ', assembled.strip())
|
|
|
|
2016-01-13 14:56:30 -05:00
|
|
|
|
|
|
|
def get_current_ref():
    """Return the checked-out branch name, or the commit SHA when detached."""
    ref = run_cmd("git rev-parse --abbrev-ref HEAD").strip()
    if ref != 'HEAD':
        return ref
    # 'HEAD' means we're on a detached HEAD; resolve to the commit SHA instead.
    return run_cmd("git rev-parse HEAD").strip()
|
|
|
|
|
|
|
|
2015-04-21 21:08:29 -04:00
|
|
|
def main():
    """Interactively merge a pull request into Apache Spark.

    Workflow: prompt for a PR number, optionally standardize its title,
    either backport an already-merged PR or merge it fresh, offer
    cherry-picks into maintenance branches, and finally try to resolve the
    associated JIRA issue(s).

    Relies on module-level configuration (SPARK_HOME, GITHUB_API_BASE,
    GITHUB_BASE, JIRA_USERNAME, JIRA_PASSWORD, JIRA_IMPORTED) and helpers
    defined elsewhere in this file (get_json, run_cmd, continue_maybe,
    merge_pr, cherry_pick, fail, resolve_jira_issues).
    """
    global original_head

    os.chdir(SPARK_HOME)
    # Record the starting ref up front — presumably restored by clean_up()
    # when the script fails partway through (TODO confirm).
    original_head = get_current_ref()

    # Check this up front to avoid failing the JIRA update at the very end
    if not JIRA_USERNAME or not JIRA_PASSWORD:
        continue_maybe("The env-vars JIRA_USERNAME and/or JIRA_PASSWORD are not set. Continue?")

    branches = get_json("%s/branches" % GITHUB_API_BASE)
    branch_names = filter(lambda x: x.startswith("branch-"), [x['name'] for x in branches])
    # Assumes branch names can be sorted lexicographically
    latest_branch = sorted(branch_names, reverse=True)[0]

    pr_num = input("Which pull request would you like to merge? (e.g. 34): ")
    pr = get_json("%s/pulls/%s" % (GITHUB_API_BASE, pr_num))
    pr_events = get_json("%s/issues/%s/events" % (GITHUB_API_BASE, pr_num))

    url = pr["url"]

    # Decide whether to use the modified title or not
    modified_title = standardize_jira_ref(pr["title"])
    if modified_title != pr["title"]:
        print("I've re-written the title as follows to match the standard format:")
        print("Original: %s" % pr["title"])
        print("Modified: %s" % modified_title)
        result = input("Would you like to use the modified title? (y/n): ")
        if result.lower() == "y":
            title = modified_title
            print("Using modified title:")
        else:
            title = pr["title"]
            print("Using original title:")
        print(title)
    else:
        title = pr["title"]

    body = pr["body"]
    target_ref = pr["base"]["ref"]
    user_login = pr["user"]["login"]
    base_ref = pr["head"]["ref"]
    pr_repo_desc = "%s/%s" % (user_login, base_ref)

    # Merged pull requests don't appear as merged in the GitHub API;
    # Instead, they're closed by asfgit.
    merge_commits = \
        [e for e in pr_events if e["actor"]["login"] == "asfgit" and e["event"] == "closed"]

    if merge_commits:
        # Already merged: switch to backport mode and cherry-pick instead.
        merge_hash = merge_commits[0]["commit_id"]
        message = get_json("%s/commits/%s" % (GITHUB_API_BASE, merge_hash))["commit"]["message"]

        print("Pull request %s has already been merged, assuming you want to backport" % pr_num)
        # --quiet --verify prints nothing when the commit is unknown locally.
        commit_is_downloaded = run_cmd(['git', 'rev-parse', '--quiet', '--verify',
                                        "%s^{commit}" % merge_hash]).strip() != ""
        if not commit_is_downloaded:
            fail("Couldn't find any merge commit for #%s, you may need to update HEAD." % pr_num)

        print("Found commit %s:\n%s" % (merge_hash, message))
        cherry_pick(pr_num, merge_hash, latest_branch)
        sys.exit(0)

    if not bool(pr["mergeable"]):
        msg = "Pull request %s is not mergeable in its current form.\n" % pr_num + \
            "Continue? (experts only!)"
        continue_maybe(msg)

    print("\n=== Pull Request #%s ===" % pr_num)
    print("title\t%s\nsource\t%s\ntarget\t%s\nurl\t%s" %
          (title, pr_repo_desc, target_ref, url))
    continue_maybe("Proceed with merging pull request #%s?" % pr_num)

    merged_refs = [target_ref]

    merge_hash = merge_pr(pr_num, target_ref, title, body, pr_repo_desc)

    # Keep offering cherry-picks until the user declines; collect every
    # branch the change landed in so the JIRA update can list them all.
    pick_prompt = "Would you like to pick %s into another branch?" % merge_hash
    while input("\n%s (y/n): " % pick_prompt).lower() == "y":
        merged_refs = merged_refs + [cherry_pick(pr_num, merge_hash, latest_branch)]

    if JIRA_IMPORTED:
        if JIRA_USERNAME and JIRA_PASSWORD:
            continue_maybe("Would you like to update an associated JIRA?")
            jira_comment = "Issue resolved by pull request %s\n[%s/%s]" % \
                (pr_num, GITHUB_BASE, pr_num)
            resolve_jira_issues(title, merged_refs, jira_comment)
        else:
            print("JIRA_USERNAME and JIRA_PASSWORD not set")
            print("Exiting without trying to close the associated JIRA.")
    else:
        print("Could not find jira-python library. Run 'sudo pip install jira' to install.")
        print("Exiting without trying to close the associated JIRA.")
|
2015-04-21 21:08:29 -04:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    import doctest

    # Run the embedded doctests first; bail out before touching git if any
    # of them fail.
    failure_count, test_count = doctest.testmod()
    if failure_count:
        sys.exit(-1)

    # On any failure (including Ctrl-C), run clean_up() and then re-raise.
    try:
        main()
    except:
        clean_up()
        raise
|