diff --git a/tools/attention_list.py b/tools/attention_list.py
index eea62343..e03d91b4 100644
--- a/tools/attention_list.py
+++ b/tools/attention_list.py
@@ -21,19 +21,34 @@
 import argparse
 import logging
 import requests
 import json
-import threading
+
 
 class FailedPR:
     """Base class for failed Pull Requests"""
-    def __init__(self, host, url, created_at, updated_at, zuul_url=None, status=None, error=None):
-        self.host = host
-        self.url = url
-        self.status = status
-        self.zuul_url = zuul_url
+    def __init__(
+            self,
+            created_at,
+            host,
+            updated_at,
+            url,
+            error=None,
+            pullrequest=None,
+            org=None,
+            repo=None,
+            status=None,
+            zuul_url=None):
+
         self.created_at = created_at
-        self.updated_at = updated_at
         self.error = error
+        self.host = host
+        self.org = org
+        self.pullrequest = pullrequest
+        self.repo = repo
+        self.status = status
+        self.updated_at = updated_at
+        self.url = url
+        self.zuul_url = zuul_url
 
 
 def get_args():
@@ -92,6 +107,7 @@ def get_args():
     )
     return parser.parse_args()
 
+
 def get_gitea_repos(url, headers, gitea_org):
     """
     Get all Repositories of one Gitea orgainzation
@@ -101,10 +117,13 @@
 
     while True:
         try:
-            req_url = (url + 'orgs/' + gitea_org +
-                       '/repos?limit=50&page=' + str(i))
+            req_url = (url +
+                       'orgs/' +
+                       gitea_org +
+                       '/repos?limit=50&page=' +
+                       str(i))
             res = requests.request('GET', url=req_url, headers=headers)
-            i+=1
+            i += 1
             if res.json():
                 for repo in res.json():
                     repositories.append(repo)
@@ -114,7 +133,7 @@
         except Exception as e:
             print("An error has occured: " + str(e))
             print("The request status is: " + str(res.status_code) +
-                " | " + str(res.reason))
+                  " | " + str(res.reason))
             break
     return repositories
 
@@ -152,6 +171,9 @@
                 o = FailedPR(
                     host='gitea',
                     url=pull['url'],
+                    org=gitea_org,
+                    repo=repo['name'],
+                    pullrequest=pull['title'],
                     status=res_sta.json()[0]['status'],
                     zuul_url=res_sta.json()[0]['target_url'],
                     created_at=pull['created_at'],
@@ -219,6 +241,7 @@
                 break
     return pullrequests
 
+
 def get_failed_gh_commits(pull, url, gh_org, repo, headers):
     """
     Collect all Failed Pull Requests of one GitHub repository
@@ -234,6 +257,9 @@
                 o = FailedPR(
                     host='github',
                     url=pull['html_url'],
+                    org=gh_org,
+                    repo=repo['name'],
+                    pullrequest=pull['title'],
                     status=res_sta.json()['check_runs'][0]['conclusion'],
                     zuul_url=(res_sta.json()['check_runs']
                               [0]['details_url']),
@@ -247,6 +273,9 @@
                 o = FailedPR(
                     host='github',
                     url=pull['html_url'],
+                    org=gh_org,
+                    repo=repo['name'],
+                    pullrequest=pull['title'],
                     created_at=pull['created_at'],
                     updated_at=pull['updated_at'],
                     error=1001,
@@ -261,6 +290,7 @@
         exit()
     return failed_commits
 
+
 def create_json_result(failed_commits):
     """
     Create Result
@@ -278,7 +308,7 @@
         result['meta']['count'] = 0
     return json.dumps(result)
 
-    
+
 def main():
     args = get_args()
 
@@ -355,9 +385,9 @@
             headers=headers
         )
         failed_commits.extend(commits)
-
-    print(create_json_result(
-        failed_commits=failed_commits))
+
+    print(create_json_result(
+        failed_commits=failed_commits))
 
 
 if __name__ == '__main__':