From c660386935d583a39ef3d027cb514bb4a239a9fe Mon Sep 17 00:00:00 2001
From: tischrei
Date: Wed, 28 Sep 2022 12:04:32 +0000
Subject: [PATCH] jobs added and archived pull requests removed

---
 tools/attention_list.py | 40 ++++++++++++++++++++++++++++++++--------
 1 file changed, 32 insertions(+), 8 deletions(-)

diff --git a/tools/attention_list.py b/tools/attention_list.py
index e03d91b4..8152e80a 100644
--- a/tools/attention_list.py
+++ b/tools/attention_list.py
@@ -21,6 +21,7 @@ import argparse
 import logging
 import requests
 import json
+import re
 
 
 class FailedPR:
@@ -107,6 +108,29 @@ def get_args():
     )
     return parser.parse_args()
 
+def add_builds_to_obj(obj, url, tenant):
+    """
+    This method trys to find all build jobs under a Zuul buildset.
+    The corresponding data like log_url and status will be added.
+    """
+    zuul_api_url = "https://zuul.otc-service.com/api/tenant/" + tenant + "/buildset/"
+    final_url = re.sub('.*\/buildset\/', zuul_api_url, url)
+    zuul_headers = {}
+    zuul_headers['accept'] = 'application/json'
+    res_zuul = requests.request('GET', url=final_url, headers=zuul_headers)
+    if res_zuul.status_code != 404 and res_zuul.json():
+        x = res_zuul.json()
+        if len(x['builds']) != 0:
+            jobs = []
+            for build in x['builds']:
+                job = {}
+                job['uuid'] = build['uuid']
+                job['name'] = build['job_name']
+                job['result'] = build['result']
+                job['log_url'] = build['log_url']
+                jobs.append(job)
+            obj.jobs = jobs
+    return obj
 
 def get_gitea_repos(url, headers, gitea_org):
     """
@@ -168,6 +192,7 @@
     res_sta = requests.request('GET', url=req_url, headers=headers)
     if res_sta.json():
         if res_sta.json()[0]['status'] == 'failure':
+            zuul_url = res_sta.json()[0]['target_url']
             o = FailedPR(
                 host='gitea',
                 url=pull['url'],
@@ -175,19 +200,18 @@
                 repo=repo['name'],
                 pullrequest=pull['title'],
                 status=res_sta.json()[0]['status'],
-                zuul_url=res_sta.json()[0]['target_url'],
+                zuul_url=zuul_url,
                 created_at=pull['created_at'],
                 updated_at=res_sta.json()[0]['updated_at'],
                 error=1000
             )
+            o = add_builds_to_obj(obj=o, url=zuul_url, tenant='gl')
             failed_commits.append(o)
     except Exception as e:
         print("An error has occured: " + str(e))
         print("The request status is: " + str(res_sta.status_code)
               + " | " + str(res_sta.reason))
-        print(json.dumps(res_sta.json()))
-        exit()
     return failed_commits
 
 
 def get_github_repos(url, headers, gh_org):
@@ -203,7 +227,8 @@
     res = requests.request('GET', url=req_url, headers=headers)
     if res.json():
         for repo in res.json():
-            repositories.append(repo)
+            if repo['archived'] == False:
+                repositories.append(repo)
         i+=1
         continue
     else:
@@ -254,6 +279,7 @@
         if res_sta.json():
             if len(res_sta.json()['check_runs']) != 0:
                 if res_sta.json()['check_runs'][0]['conclusion'] == 'failure':
+                    zuul_url = res_sta.json()['check_runs'][0]['details_url']
                     o = FailedPR(
                         host='github',
                         url=pull['html_url'],
@@ -261,13 +287,13 @@
                         repo=repo['name'],
                         pullrequest=pull['title'],
                         status=res_sta.json()['check_runs'][0]['conclusion'],
-                        zuul_url=(res_sta.json()['check_runs']
-                            [0]['details_url']),
+                        zuul_url=zuul_url,
                         created_at=pull['created_at'],
                         updated_at=(res_sta.json()['check_runs']
                             [0]['completed_at']),
                         error=1000
                     )
+                    o = add_builds_to_obj(obj=o, url=zuul_url, tenant='eco')
                     failed_commits.append(o)
                 else:
                     o = FailedPR(
@@ -286,8 +312,6 @@
         print("An error has occured: " + str(e))
         print("The request status is: " + str(res_sta.status_code)
               + " | " + str(res_sta.reason))
-        print(json.dumps(res_sta.json()))
-        exit()
     return failed_commits
 
 