Skip to content

Commit

Permalink
Implement paging for GitHub issues
Browse files Browse the repository at this point in the history
The previous code only fetched the first page of API results, thus missing most of the existing issues.
As a result, it filed duplicate issues on every invocation.

Part of bazelbuild#869
  • Loading branch information
fweikert committed Dec 24, 2019
1 parent eb7da07 commit 099ef04
Showing 1 changed file with 24 additions and 10 deletions.
34 changes: 24 additions & 10 deletions buildkite/aggregate_incompatible_flags_test_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,9 @@ def __init__(self, code, message):


class GitHubIssueClient(object):

LINK_PATTERN = re.compile(r'<(?P<url>.*?)>; rel="(?P<type>\w+)"')

def __init__(self, reporter, oauth_token):
self._reporter = reporter
self._session = requests.Session()
Expand All @@ -91,10 +94,11 @@ def __init__(self, reporter, oauth_token):

def get_issue(self, repo_owner, repo_name, title):
    """Return the number of an issue with the given title, or None if absent.

    Pages through all issues filed by the configured reporter; if several
    issues share the title, an arbitrary one of them is returned.
    """
    pages = self._send_request(repo_owner, repo_name, params={"creator": self._reporter})
    matches = (
        issue["number"]
        for page in pages
        for issue in page
        if issue["title"] == title
    )
    return next(matches, None)

def create_issue(self, repo_owner, repo_name, title, body):
json_data = self._send_request(
Expand All @@ -103,7 +107,7 @@ def create_issue(self, repo_owner, repo_name, title, body):
verb="post",
json={"title": title, "body": body, "assignee": None, "labels": [], "milestone": None},
)
return json_data.get("number", "")
return list(json_data)[0].get("number", "")

def update_title(self, repo_owner, repo_name, issue_number, title):
self._send_request(
Expand All @@ -116,11 +120,21 @@ def _send_request(self, repo_owner, repo_name, issue=None, verb="get", **kwargs)
url = os.path.join(url, str(issue))

method = getattr(self._session, verb)
response = method(url, **kwargs)
if response.status_code // 100 != 2:
raise GitHubError(response.status_code, response.content)

return response.json()
while url:
response = method(url, **kwargs)
if response.status_code // 100 != 2:
raise GitHubError(response.status_code, response.content)

url = self.get_next_page_url(response.headers)
yield response.json()

def get_next_page_url(self, headers):
    """Extract the URL of the next page of results from a GitHub response.

    Parses the RFC 5988 `Link` header, which looks like
    `<https://...&page=2>; rel="next", <https://...&page=5>; rel="last"`.

    Returns the `rel="next"` URL, or None if the header is missing
    (single-page result) or contains no "next" entry (last page).
    """
    link = headers.get("Link")
    if not link:
        # GitHub omits the Link header entirely when all results fit on
        # one page; without this guard, None.split() would raise.
        return None
    for part in link.split(","):
        match = self.LINK_PATTERN.match(part.strip())
        if match and match.group("type") == "next":
            return match.group("url")
    return None


class LogFetcher(threading.Thread):
Expand Down Expand Up @@ -360,7 +374,7 @@ def handle_already_flipped_flags(failed_jobs_per_flag, details_per_flag):
details_for_new_flags[flag] = details
if flag in failed_jobs_per_flag:
failed_jobs_for_new_flags[flag] = failed_jobs_per_flag[flag]

return failed_jobs_for_new_flags, details_for_new_flags


Expand Down

0 comments on commit 099ef04

Please sign in to comment.