Auto merge of #82340 - kennytm:fix-82254, r=Mark-Simulacrum
Fix some Python2→3 errors in publish_toolstate.py

Fixes #82254. The failure is primarily due to `data = json.dumps(…)` producing a `str` rather than `bytes`, which are distinct types on Python 3, so `urllib.request.urlopen(…, data)` rejects `data` and raises the error. This PR adds an `.encode()` call after `json.dumps()` to ensure we send `bytes`. It also adds type annotations so the script statically type-checks with `mypy` on both Python 2 and 3.
commit ef14688221
1 changed file with 34 additions and 16 deletions
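As background, a minimal sketch of the failure and the fix described above; the payload, URL, and header values are illustrative, not taken from the script:

```python
import json

try:
    import urllib2                          # Python 2
except ImportError:
    import urllib.request as urllib2        # Python 3

# json.dumps() returns str; on Python 3 that is text, not bytes.
payload = json.dumps({'body': 'toolstate changed'})

# Python 3's urlopen() rejects a str body (it wants bytes or a file-like
# object), which is the error this PR fixes. Encoding first gives bytes
# that both Python 2 and Python 3 accept.
request = urllib2.Request(
    'https://api.github.com/repos/rust-lang/rust/issues',   # illustrative URL
    payload.encode(),
    {'Content-Type': 'application/json'},
)

assert isinstance(payload.encode(), bytes)
```

(The sketch only builds the `Request`; actually calling `urllib2.urlopen(request)` would need a network connection and an API token.)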
publish_toolstate.py
@@ -17,8 +17,14 @@ import collections
 import textwrap
 try:
     import urllib2
+    from urllib2 import HTTPError
 except ImportError:
     import urllib.request as urllib2
+    from urllib.error import HTTPError
+try:
+    import typing
+except ImportError:
+    pass
 
 # List of people to ping when the status of a tool or a book changed.
 # These should be collaborators of the rust-lang/rust repository (with at least
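The guarded imports above let the same script body run under both interpreters; the `typing` import exists only so that names like `typing.Iterable` resolve when mypy analyses the file, and it is harmless if the module is absent at runtime. A small illustration with a made-up helper (not part of publish_toolstate.py):

```python
try:
    import typing                  # referenced only by the type comment below
except ImportError:
    pass                           # Python 2 without the backport still runs


def first_commit(lines):
    # type: (typing.Iterable[str]) -> typing.Optional[str]
    # Comment-style annotations are checked by mypy on both Python 2 and 3,
    # but are ignored by the interpreter at runtime.
    for line in lines:
        return line.split('\t', 1)[0]
    return None


print(first_commit(["abc123\ttest-pass\n"]))   # prints: abc123
```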
@@ -63,20 +69,23 @@ REPOS = {
 }
 
 
 def load_json_from_response(resp):
+    # type: (typing.Any) -> typing.Any
     content = resp.read()
     if isinstance(content, bytes):
-        content = content.decode('utf-8')
+        content_str = content.decode('utf-8')
     else:
         print("Refusing to decode " + str(type(content)) + " to str")
-    return json.loads(content)
+    return json.loads(content_str)
 
 
 def validate_maintainers(repo, github_token):
+    # type: (str, str) -> None
     '''Ensure all maintainers are assignable on a GitHub repo'''
     next_link_re = re.compile(r'<([^>]+)>; rel="next"')
 
     # Load the list of assignable people in the GitHub repo
-    assignable = []
-    url = 'https://api.github.com/repos/%s/collaborators?per_page=100' % repo
+    assignable = []  # type: typing.List[str]
+    url = 'https://api.github.com/repos/' \
+        + '%s/collaborators?per_page=100' % repo  # type: typing.Optional[str]
     while url is not None:
         response = urllib2.urlopen(urllib2.Request(url, headers={
             'Authorization': 'token ' + github_token,
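The rename to `content_str` in the hunk above presumably keeps each variable at a single static type (`content` stays `bytes`, `content_str` is `str`); reassigning a `bytes` variable with a `str` value is something mypy flags by default. A standalone sketch of the same pattern, with a hypothetical function name rather than the script's helper:

```python
import json

try:
    import typing                  # referenced only in the type comment
except ImportError:
    pass


def load_json_from_bytes(raw):
    # type: (bytes) -> typing.Any
    # Decoding into a separate name keeps `raw` as bytes and `text` as str,
    # so mypy sees one consistent type per variable.
    text = raw.decode('utf-8')
    return json.loads(text)


print(load_json_from_bytes(b'{"rustfmt": "test-pass"}'))
```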
@@ -116,9 +125,10 @@ def validate_maintainers(repo, github_token):
 
 
 def read_current_status(current_commit, path):
+    # type: (str, str) -> typing.Mapping[str, typing.Any]
     '''Reads build status of `current_commit` from content of `history/*.tsv`
     '''
-    with open(path, 'rU') as f:
+    with open(path, 'r') as f:
         for line in f:
             (commit, status) = line.split('\t', 1)
             if commit == current_commit:
@@ -127,10 +137,12 @@ def read_current_status(current_commit, path):
 
 
 def gh_url():
+    # type: () -> str
     return os.environ['TOOLSTATE_ISSUES_API_URL']
 
 
 def maybe_delink(message):
+    # type: (str) -> str
     if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
         return message.replace("@", "")
     return message
@@ -143,8 +155,10 @@ def issue(
     relevant_pr_number,
     relevant_pr_user,
     labels,
+    github_token,
 ):
-    # Open an issue about the toolstate failure.
+    # type: (str, str, typing.Iterable[str], str, str, typing.List[str], str) -> None
+    '''Open an issue about the toolstate failure.'''
     if status == 'test-fail':
         status_description = 'has failing tests'
     else:
@@ -168,7 +182,7 @@ def issue(
     print("Creating issue:\n{}".format(request))
     response = urllib2.urlopen(urllib2.Request(
         gh_url(),
-        request,
+        request.encode(),
         {
             'Authorization': 'token ' + github_token,
             'Content-Type': 'application/json',
@@ -183,8 +197,10 @@ def update_latest(
     relevant_pr_url,
     relevant_pr_user,
     pr_reviewer,
-    current_datetime
+    current_datetime,
+    github_token,
 ):
+    # type: (str, str, str, str, str, str, str) -> str
     '''Updates `_data/latest.json` to match build result of the given commit.
     '''
     with open('_data/latest.json', 'r+') as f:
@@ -243,13 +259,14 @@ def update_latest(
             if create_issue_for_status is not None:
                 try:
                     issue(
-                        tool, create_issue_for_status, MAINTAINERS.get(tool, ''),
-                        relevant_pr_number, relevant_pr_user, LABELS.get(tool, ''),
+                        tool, create_issue_for_status, MAINTAINERS.get(tool, ()),
+                        relevant_pr_number, relevant_pr_user, LABELS.get(tool, []),
+                        github_token,
                     )
-                except urllib2.HTTPError as e:
+                except HTTPError as e:
                     # network errors will simply end up not creating an issue, but that's better
                     # than failing the entire build job
-                    print("HTTPError when creating issue for status regression: {0}\n{1}"
+                    print("HTTPError when creating issue for status regression: {0}\n{1!r}"
                           .format(e, e.read()))
                 except IOError as e:
                     print("I/O error when creating issue for status regression: {0}".format(e))
@@ -318,7 +335,8 @@ try:
         relevant_pr_url,
         relevant_pr_user,
         pr_reviewer,
-        cur_datetime
+        cur_datetime,
+        github_token,
     )
     if not message:
         print('<Nothing changed>')
@@ -337,13 +355,13 @@ try:
     issue_url = gh_url() + '/{}/comments'.format(number)
     response = urllib2.urlopen(urllib2.Request(
         issue_url,
-        json.dumps({'body': maybe_delink(message)}),
+        json.dumps({'body': maybe_delink(message)}).encode(),
         {
             'Authorization': 'token ' + github_token,
             'Content-Type': 'application/json',
         }
     ))
     response.read()
-except urllib2.HTTPError as e:
-    print("HTTPError: %s\n%s" % (e, e.read()))
+except HTTPError as e:
+    print("HTTPError: %s\n%r" % (e, e.read()))
     raise