# forked from infra/otc-metadata
# Zuul jobs expect to know PDF filenames to fetch from artifacts. The best
# way is to include them in the project config while managing the zuul
# config.
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
import argparse
|
|
import logging
|
|
# import os
|
|
import pathlib
|
|
import requests
|
|
import subprocess
|
|
# import warnings
|
|
|
|
from git import exc
|
|
from git import Repo
|
|
# from git import SymbolicReference
|
|
|
|
from ruamel.yaml import CommentedMap
|
|
from ruamel.yaml import YAML
|
|
|
|
import otc_metadata.services
|
|
|
|
data = otc_metadata.services.Services()
|
|
|
|
yaml = YAML()
|
|
|
|
api_session = requests.Session()
|
|
|
|
|
|
def load_zuul_config(workdir):
    """Load the zuul project configuration from *workdir*.

    Looks for ".zuul.yaml" first, then "zuul.yaml".

    :param workdir: directory (str or Path) containing the checkout.
    :returns: tuple of (parsed zuul config, pathlib.Path of the file).
    :raises FileNotFoundError: when no zuul config file is present.
        Previously the function fell through and returned an implicit
        ``None``, which made the caller crash with an opaque TypeError
        while unpacking the result.
    """
    for candidate in (".zuul.yaml", "zuul.yaml"):
        config_path = pathlib.Path(workdir, candidate)
        if config_path.exists():
            with open(config_path, "r") as f:
                return (yaml.load(f), config_path)
    raise FileNotFoundError(
        f"No zuul configuration file (.zuul.yaml or zuul.yaml) in {workdir}"
    )
|
def open_pr(args, repository, pr_data):
    """Open a pull request via the Git hoster REST API (gitea).

    :param args: parsed CLI arguments; provides ``api_url``.
    :param repository: "owner/name" of the target repository.
    :param pr_data: dict with "title", "body", "head" and optionally
        "base" (defaults to "main"), "assignees" and "labels".
    """
    # Announce *before* the request — the original printed this future-tense
    # message after the PR had already been created (or failed).
    logging.info(
        "Going to open PR with title %s in %s", pr_data["title"], repository
    )
    req = dict(
        title=pr_data["title"],
        # PR bodies come from static strings; turn escaped newlines into
        # real ones for the API.
        body=pr_data["body"].replace("\\n", "\n"),
        base=pr_data.get("base", "main"),
        head=pr_data["head"],
    )
    # Optional fields are only sent when present.
    if "assignees" in pr_data:
        req["assignees"] = pr_data["assignees"]
    if "labels" in pr_data:
        req["labels"] = pr_data["labels"]
    rsp = api_session.post(
        f"{args.api_url}/repos/{repository}/pulls", json=req
    )
    if rsp.status_code != 201:
        # Log (with the status code) and continue: a failed PR creation
        # should not abort processing of the remaining repositories.
        logging.error(
            "Failed to open PR in %s (HTTP %s): %s",
            repository, rsp.status_code, rsp.text,
        )
|
def process_repositories(args, service):
    """Checkout repositories

    For every repository of *service* matching ``args.environment``:
    clone/refresh the checkout, create a work branch, merge the expected
    zuul templates/jobs/vars into its (.)zuul.yaml, and if anything
    changed, commit, push and open a pull request.
    """
    logging.debug(f"Processing service {service}")
    workdir = pathlib.Path(args.work_dir)
    workdir.mkdir(exist_ok=True)

    # NOTE(review): copy_to is never assigned after this, yet it is used
    # as cwd= for the "gh pr create" subprocess below — that call runs in
    # the script's CWD, not the repo checkout. Probably should be repo_dir;
    # confirm before relying on the github path.
    copy_to = None
    # repo_to = None

    for repo in service["repositories"]:
        logging.debug(f"Processing repository {repo}")
        repo_dir = pathlib.Path(workdir, repo["type"], repo["repo"])

        # Only touch repositories for the requested environment.
        if repo["environment"] != args.environment:
            continue

        repo_dir.mkdir(parents=True, exist_ok=True)
        if repo["type"] == "gitea":
            repo_url = (
                f"ssh://git@gitea.eco.tsi-dev.otc-service.com:2222/"
                f"{repo['repo']}"
            )
        elif repo["type"] == "github":
            repo_url = f"git@github.com:/{repo['repo']}"
        else:
            logging.error(f"Repository type {repo['type']} is not supported")
            exit(1)

        # repo_dir always exists at this point (mkdir above); Repo() on an
        # empty directory raises InvalidGitRepositoryError, which funnels
        # into the clone branch below.
        if repo_dir.exists():
            logging.debug(f"Repository {repo} already checked out")
            try:
                git_repo = Repo(repo_dir)
                git_repo.remotes.origin.update()
                git_repo.remotes.origin.fetch()
                git_repo.heads.main.checkout()
                git_repo.remotes.origin.pull()
            except exc.InvalidGitRepositoryError:
                logging.error("Existing repository checkout is bad")
                # NOTE(review): rmdir() only removes an *empty* directory;
                # a corrupted-but-populated checkout will make this raise.
                # shutil.rmtree may be intended — confirm.
                repo_dir.rmdir()

        if not repo_dir.exists():
            try:
                git_repo = Repo.clone_from(repo_url, repo_dir, branch="main")
            except Exception:
                # NOTE(review): returns, so remaining repositories of this
                # service are skipped as well.
                logging.error(f"Error cloning repository {repo_url}")
                return

        branch_name = f"{args.branch_name}"
        if args.branch_force:
            logging.debug("Dropping current branch")
            try:
                git_repo.delete_head(branch_name, force=True)
            except exc.GitCommandError:
                # Branch did not exist yet — nothing to drop.
                pass
        try:
            new_branch = git_repo.create_head(branch_name, "main")
        except Exception as ex:
            # Branch already exists (and --branch-force not set) or git
            # failed; skip the whole service rather than fight it.
            logging.warning(f"Skipping service {service} due to {ex}")
            return
        new_branch.checkout()

        # Parse the existing zuul config and index its parts.
        (zuul_config, zuul_file_name) = load_zuul_config(repo_dir)
        zuul_templates = None
        zuul_jobs = dict()
        zuul_new_jobs = list()
        zuul_vars = dict()
        zuul_config_updated = False
        for item in zuul_config:
            if "project" in item.keys():
                project = item["project"]
                zuul_templates = project.setdefault("templates", [])
                # setdefault may hand back an explicit null from the YAML;
                # normalize to a list.
                if not zuul_templates:
                    zuul_templates = []
                zuul_vars = project.setdefault("vars", {})
            elif "job" in item.keys():
                job = item["job"]
                zuul_jobs[job["name"]] = job
        logging.debug(f"Existing jobs {zuul_jobs}")
        # Every repo gets the base template.
        if "helpcenter-base-jobs" not in zuul_templates:
            zuul_templates.append("helpcenter-base-jobs")
            zuul_config_updated = True

        # Internal environment uses differently named job templates.
        job_suffix = (
            "-hc-int-jobs" if args.environment == "internal" else "-hc-jobs"
        )
        sphinx_pdf_files = zuul_vars.setdefault('sphinx_pdf_files', [])
        for doc in data.docs_by_service_type(service["service_type"]):
            logging.debug(f"Analyzing document {doc}")
            if not doc.get("type"):
                continue
            if doc["type"] == "dev":
                doc_type = "dev-guide"
            else:
                doc_type = doc["type"]
            # Collect all PDF files into sphinx_pdf_files var
            pdf_name = doc.get('pdf_name')
            if pdf_name and f"{pdf_name}.pdf" not in sphinx_pdf_files:
                sphinx_pdf_files.append(f"{pdf_name}.pdf")
                zuul_config_updated = True

            template_name = f"{doc_type}{job_suffix}"
            if doc_type in ["api-ref", "umn", "dev-guide"]:
                # Standard doc types are covered by shared templates.
                if template_name not in zuul_templates:
                    zuul_templates.append(template_name)
            else:
                # Non-standard doc types get an individual tox-docs job.
                job_name = f"build-otc-{doc['service_type']}-{doc_type}"
                if job_name not in zuul_jobs:
                    zuul_config_updated = True
                    zuul_new_jobs.append(
                        dict(
                            job=dict(
                                name=job_name,
                                parent="otc-tox-docs",
                                description=(
                                    f"Build {doc_type} document using tox"
                                ),
                                files=[
                                    f"^{doc['rst_location']}/.*"
                                ],
                                vars=dict(
                                    tox_envlist=doc_type
                                )
                            )
                        )
                    )

        if zuul_config_updated:
            # Job definitions go to the top of the file, before the project
            # stanza that references them.
            for new_job in zuul_new_jobs:
                zuul_config.insert(0, new_job)

            for item in zuul_config:
                if "project" in item.keys():
                    project = item["project"]
                    project["templates"] = zuul_templates
                    project["vars"] = zuul_vars
                    # Ensure new jobs are in check
                    if len(zuul_new_jobs) > 0:
                        project.setdefault(
                            "check",
                            CommentedMap(jobs=[])
                        )
                        project["check"].yaml_set_comment_before_after_key(
                            key="jobs",
                            indent=6,
                            before=(
                                "Separate documents are rendered in check, "
                                "while published through regular "
                                "otc-tox-docs job part of the basic template"
                            )
                        )
                        project["check"]["jobs"].extend(
                            [x["job"]["name"] for x in zuul_new_jobs])

            # yaml.indent(offset=2, sequence=2)
            with open(zuul_file_name, "w") as f:
                yaml.dump(zuul_config, f)
            git_repo.index.add([zuul_file_name.name])
            if len(git_repo.index.diff("HEAD")) == 0:
                # Nothing to commit
                # NOTE(review): this returns instead of continue, so the
                # remaining repositories of the service are skipped too.
                logging.debug(
                    "No changes for service %s required" %
                    service["service_type"])
                return

            git_repo.index.commit(
                (
                    "Update zuul.yaml file\n\n"
                    "Performed-by: gitea/infra/otc-metadata"
                    "/tools/update_zuul_project_config.py"
                )
            )
            push_args = ["--set-upstream", "origin", branch_name]
            if args.branch_force:
                push_args.append("--force")

            git_repo.git.push(*push_args)
            if repo["type"] == "github":
                # Relies on an authenticated "gh" CLI being available.
                subprocess.run(
                    args=["gh", "pr", "create", "-f"], cwd=copy_to, check=True
                )
            elif repo["type"] == "gitea":
                open_pr(
                    args,
                    repo["repo"],
                    dict(
                        title="Update zuul config",
                        body="Update zuul config templates",
                        head=branch_name,
                    ),
                )
|
def main():
    """CLI entry point: update zuul.yaml in every matching service repo."""
    parser = argparse.ArgumentParser(
        description="Update zuul.yaml file in repositories."
    )
    parser.add_argument(
        "--environment",
        required=True,
        help="Repository Environment",
    )
    parser.add_argument("--service-type", help="Service to update")
    parser.add_argument(
        "--work-dir",
        required=True,
        help="Working directory to use for repository checkout.",
    )
    parser.add_argument(
        "--branch-name",
        default="zuul",
        help="Branch name to be used for synchronizing.",
    )
    parser.add_argument(
        "--branch-force",
        action="store_true",
        help="Whether to force branch recreation.",
    )
    parser.add_argument("--token", metavar="token", help="API token")
    parser.add_argument("--api-url", help="API base url of the Git hoster")

    args = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG)

    if args.service_type:
        service = data.service_dict.get(args.service_type)
        if service is None:
            # Fail early with a clear message instead of crashing later on
            # a list containing None (the previous behavior).
            logging.error(f"Unknown service type {args.service_type}")
            exit(1)
        services = [service]
    else:
        services = data.all_services

    if args.token:
        # All subsequent API calls on the shared session are authenticated.
        api_session.headers.update({"Authorization": f"token {args.token}"})

    for service in services:
        process_repositories(args, service)


if __name__ == "__main__":
    main()
|