From d4de57a7f4fecfe453cb98f0fafdd9804c2dcc68 Mon Sep 17 00:00:00 2001
From: Artem Goncharov
Date: Tue, 3 May 2022 07:20:12 +0200
Subject: [PATCH] add zuul jobs (#1)

Enable zuul jobs

Reviewed-by: OpenTelekomCloud Bot
Reviewed-by: Vladimir Hasko
---
 .gitignore | 3 +
 README.md | 30 ++
 docs_big-data.yaml | 14 +
 docs_compute.yaml | 20 ++
 otc_doc_convertor/convertor.py | 52 +--
 playbooks/post.yaml | 44 +++
 playbooks/pre.yaml | 13 +
 playbooks/propose_update.yaml | 104 ++++++
 playbooks/run.yaml | 75 +++++
 playbooks/vars/docs_big-data.yaml | 1 +
 playbooks/vars/docs_compute.yaml | 1 +
 process.py | 310 ------------------
 process_links.py | 25 --
 roles/apply_doc_patch/defaults/main.yaml | 6 +
 roles/apply_doc_patch/tasks/main.yaml | 111 +++++++
 .../apply_doc_patch/templates/pr_body.txt.j2 | 9 +
 roles/convert_doc/defaults/main.yaml | 1 +
 roles/convert_doc/tasks/main.yaml | 4 +
 roles/fetch_doc_artifacts/defaults/main.yaml | 3 +
 roles/fetch_doc_artifacts/tasks/doc.yaml | 43 +++
 roles/fetch_doc_artifacts/tasks/main.yaml | 19 ++
 roles/fetch_doc_artifacts/tasks/patch.yaml | 31 ++
 roles/generate_doc_patch/tasks/main.yaml | 21 ++
 tox.ini | 40 +++
 zuul.yaml | 105 +++++-
 25 files changed, 722 insertions(+), 363 deletions(-)
 create mode 100644 docs_big-data.yaml
 create mode 100644 docs_compute.yaml
 create mode 100644 playbooks/post.yaml
 create mode 100644 playbooks/pre.yaml
 create mode 100644 playbooks/propose_update.yaml
 create mode 100644 playbooks/run.yaml
 create mode 120000 playbooks/vars/docs_big-data.yaml
 create mode 120000 playbooks/vars/docs_compute.yaml
 delete mode 100644 process.py
 delete mode 100644 process_links.py
 create mode 100644 roles/apply_doc_patch/defaults/main.yaml
 create mode 100644 roles/apply_doc_patch/tasks/main.yaml
 create mode 100644 roles/apply_doc_patch/templates/pr_body.txt.j2
 create mode 100644 roles/convert_doc/defaults/main.yaml
 create mode 100644 roles/convert_doc/tasks/main.yaml
 create mode 100644 roles/fetch_doc_artifacts/defaults/main.yaml
 create mode 100644 roles/fetch_doc_artifacts/tasks/doc.yaml
 create mode 100644 roles/fetch_doc_artifacts/tasks/main.yaml
 create mode 100644 roles/fetch_doc_artifacts/tasks/patch.yaml
 create mode 100644 roles/generate_doc_patch/tasks/main.yaml
 create mode 100644 tox.ini

diff --git a/.gitignore b/.gitignore
index a8e1a2d4..ead078c4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+AUTHORS
+ChangeLog
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/README.md b/README.md
index 106d3228..e0c53931 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,32 @@
 # doc-exports
 Repository to track document exports, not for public use
+
+Document sources (in HTML) are placed under docs/<service>/<type>/
+
+In the repository root there are files docs_<category>.yaml that
+describe the required data for the particular document. For
+all of the documents configured in those files, conversion
+from HTML to RST will be attempted. In addition to that, a
+patch file (the changes in RST format) will be generated.
+
+## Automatic pull request proposal
+
+A change to any of the documents configured in a
+docs_<category>.yaml file triggers a job that proposes
+corresponding changes to the target repository. This is
+implemented in the following way:
+
+- HTML content from the pull request will be converted
+  with the latest conversion script (otc_doc_convertor) to
+  RST (new)
+
+- HTML content for the same document from the current main
+  branch with the corresponding version of the conversion
+  script will be converted to RST (base).
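+
+  (These two conversions are roughly equivalent to the following
+  shell sketch, where docs/ecs/umn is an illustrative source and
+  otc-convert-doc is the console script installed from this
+  repository:
+
+      otc-convert-doc --dest new docs/ecs/umn    # PR state
+      git checkout HEAD^1
+      otc-convert-doc --dest base docs/ecs/umn   # main branch state
+
+  the resulting base and new trees are then compared as described
+  below.)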
+ +- **New** state is compared against **base** and + corresponding patch file is generated + +- Resulting patch file is force-applied in the target + repository (skipping all conflicts) and PR is created + diff --git a/docs_big-data.yaml b/docs_big-data.yaml new file mode 100644 index 00000000..960da734 --- /dev/null +++ b/docs_big-data.yaml @@ -0,0 +1,14 @@ +--- +docs: + - label: ma_umn + service: ma + type: umn + html_location: docs/modelarts/umn + repository: opentelekomcloud-docs/modelarts + project_location: /umn/source + - label: ma_api + service: ma + type: api-ref + html_location: docs/modelarts/api-ref + repository: opentelekomcloud-docs/modelarts + project_location: /api-ref/source diff --git a/docs_compute.yaml b/docs_compute.yaml new file mode 100644 index 00000000..5e79ba43 --- /dev/null +++ b/docs_compute.yaml @@ -0,0 +1,20 @@ +--- +docs: + - label: ecs_umn + service: ecs + type: umn + html_location: docs/ecs/umn + repository: opentelekomcloud-docs/elastic-cloud-server + project_location: /umn/source + - label: ecs_api + service: ecs + type: api-ref + html_location: docs/ecs/api-ref + repository: opentelekomcloud-docs/elastic-cloud-server + project_location: /api-ref/source + - label: ecs_dev + service: ecs + type: dev-guide + html_location: docs/ecs/dev + repository: opentelekomcloud-docs/elastic-cloud-server + project_location: /dev_guide/source diff --git a/otc_doc_convertor/convertor.py b/otc_doc_convertor/convertor.py index 8b28f924..daed81a4 100644 --- a/otc_doc_convertor/convertor.py +++ b/otc_doc_convertor/convertor.py @@ -179,22 +179,22 @@ class OTCDocConvertor: for li in soup.body.find_all("li"): del li['id'] - # for pre in soup.body.find_all("pre"): - # text = pre.get_text() - # # if text.startswith("{"): - # # pre["class"] = "data" - # if re.search( - # r'\[[a-z]*@\w+.*\][\s#>]?', - # text - # ): - # # Something like "[root@ecs-test-0001 ~]#" - # pre["class"] = "console" - # elif re.match( - # r'^(GET|PUT|POST|DELETE)', - # text - # ): - # # Something like "DELETE https://some_url" - # pre["class"] = "text" + for pre in soup.body.find_all("pre"): + text = pre.get_text() + # if text.startswith("{"): + # pre["class"] = "data" + if re.search( + r'\[[a-z]*@\w+.*\][\s#>]?', + text + ): + # Something like "[root@ecs-test-0001 ~]#" + pre["class"] = "console" + elif re.match( + r'^(GET|PUT|POST|DELETE)', + text + ): + # Something like "DELETE https://some_url" + pre["class"] = "text" # And now specialities rawize_strings = [ @@ -344,7 +344,7 @@ class OTCDocConvertor: open(pathlib.Path(dest, target_path, f"{target}.rst"), 'w') as writer: logging.info(f"Post processing {target}") - # writer.write(f":original_name: {f.name}\n\n") + writer.write(f":original_name: {f.name}\n\n") # Add root file label writer.write(f".. _{f.name.replace('.html', '')}:\n\n") # post process some usual stuff @@ -363,15 +363,15 @@ class OTCDocConvertor: processed_line = re.sub( r'.. code:: screen$', r'.. code-block::', processed_line) - # for lexer in ["json", "bash", "text", "console"]: - # processed_line = re.sub( - # f".. code:: {lexer}$", - # f".. code-block:: {lexer}", processed_line) - # if re.match(rf".. code:: {lexer}\s", processed_line): - # logging.error( - # f"'code-block: {lexer}' with something " - # "afterwards") - # exit(1) + for lexer in ["json", "bash", "text", "console"]: + processed_line = re.sub( + f".. code:: {lexer}$", + f".. code-block:: {lexer}", processed_line) + if re.match(rf".. 
code:: {lexer}\s", processed_line):
+ logging.error(
+ f"'code-block: {lexer}' with something "
+ "afterwards")
+ exit(1)
 # spaces are important, since code-block may reside inside
 # of the cell
 processed_line = re.sub(
diff --git a/playbooks/post.yaml b/playbooks/post.yaml
new file mode 100644
index 00000000..0feb96fc
--- /dev/null
+++ b/playbooks/post.yaml
@@ -0,0 +1,44 @@
+---
+- hosts: localhost
+ vars:
+ vault_addr: "{{ zuul_vault_addr }}"
+ vault_secret_dest: "{{ zuul.executor.work_root }}/.approle-secret"
+ vault_token_dest: "{{ zuul.executor.work_root }}/.approle-token"
+
+ roles:
+ # Get the Vault token from prepared secret-id
+ - role: create-vault-approle-token
+ vault_role_id: "{{ zuul_vault.vault_role_id }}"
+ vault_wrapping_token_id: "{{ lookup('file', vault_secret_dest) }}"
+
+- hosts: all
+ vars:
+ vault_token_dest: "{{ zuul.executor.work_root }}/.approle-token"
+ vault_addr: "{{ zuul_vault_addr }}"
+ tasks:
+
+ - name: Fetch organization tokens
+ no_log: true
+ check_mode: false
+ ansible.builtin.uri:
+ url: "{{ vault_addr }}/v1/{{ zuul_vault.vault_token_path }}"
+ headers:
+ "X-Vault-Token": "{{ lookup('file', vault_token_dest) }}"
+ method: "POST"
+ body:
+ org_name: "opentelekomcloud-docs"
+ body_format: "json"
+ register: "org_token"
+
+ - name: Revoke GitHub token lease
+ check_mode: false
+ no_log: true
+ uri:
+ url: "{{ vault_addr }}/v1/sys/leases/revoke"
+ headers:
+ "X-Vault-Token": "{{ lookup('file', vault_token_dest) }}"
+ method: "PUT"
+ body:
+ lease_id: "{{ org_token.json.lease_id }}"
+ body_format: "json"
+ status_code: 204
diff --git a/playbooks/pre.yaml b/playbooks/pre.yaml
new file mode 100644
index 00000000..1208fdcc
--- /dev/null
+++ b/playbooks/pre.yaml
@@ -0,0 +1,13 @@
+---
+- hosts: all
+ roles:
+ - ensure-python
+ - ensure-virtualenv
+ - ensure-gh
+ tasks:
+ - name: Install convertor
+ pip:
+ chdir: "{{ zuul.project.src_dir }}"
+ virtualenv: "{{ ansible_user_dir }}/.venv"
+ name: .
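+ # Editable install of this checkout: roughly
+ # "~/.venv/bin/pip install -e .", which provides the
+ # otc-convert-doc console script used later by the convert_doc role.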
+ editable: "yes" diff --git a/playbooks/propose_update.yaml b/playbooks/propose_update.yaml new file mode 100644 index 00000000..97a9893f --- /dev/null +++ b/playbooks/propose_update.yaml @@ -0,0 +1,104 @@ +--- +- hosts: localhost + vars: + vault_addr: "{{ zuul_vault_addr }}" + vault_secret_dest: "{{ zuul.executor.work_root }}/.approle-secret" + vault_token_dest: "{{ zuul.executor.work_root }}/.approle-token" + github_token_dest: "{{ zuul.executor.work_root }}/.github" + github_token_lease: "{{ zuul.executor.work_root }}/.github_lease" + + roles: + # Get the Vault token from prepared secret-id + - role: create-vault-approle-token + vault_role_id: "{{ zuul_vault.vault_role_id }}" + vault_wrapping_token_id: "{{ lookup('file', vault_secret_dest) }}" + + tasks: + + - name: Fetch organization tokens + no_log: true + check_mode: false + ansible.builtin.uri: + url: "{{ vault_addr }}/v1/{{ zuul_vault.vault_token_path }}" + headers: + "X-Vault-Token": "{{ lookup('file', vault_token_dest) }}" + method: "POST" + body: + org_name: "opentelekomcloud-docs" + body_format: "json" + register: "org_token" + + - name: Write GitHub token into the destination + no_log: true + copy: + content: "{{ org_token.json.data.token }}" + dest: "{{ github_token_dest }}" + mode: "0400" + when: + - "vault_token_dest is defined" + - "org_token is defined" + - "org_token.json is defined" + - "org_token.json.data is defined" + - "org_token.json.data.token is defined" + + - name: Save token lease + no_log: true + copy: + content: "{{ org_token.json.lease_id }}" + dest: "{{ github_token_lease }}" + mode: "0400" + when: + - "vault_token_dest is defined" + - "org_token is defined" + - "org_token.json is defined" + + +- hosts: all + vars: + github_token_dest: "{{ zuul.executor.work_root }}/.github" + github_token: "{{ lookup('file', github_token_dest) }}" + vault_addr: "{{ zuul_vault_addr }}" + vault_token_file: "{{ zuul.executor.work_root }}/.approle-token" + github_token_lease: "{{ zuul.executor.work_root }}/.github_lease" + environment: + GITHUB_TOKEN: "{{ github_token }}" + tasks: + + - name: Attempt to gracefully apply patch + block: + - name: Read project docs configuration + include_vars: "{{ docs_update_data_file }}" + + - name: Configure git user name + command: "git config --global user.name 'OpenTelekomCloud Proposal Bot'" + + - name: Configure git user email + command: "git config --global user.email 52695153+otcbot@users.noreply.github.com" + + - name: Process patch + include_role: + name: "apply_doc_patch" + vars: + doc_label: "{{ doc.label }}" + apply_doc_patch_repository: "{{ doc.repository }}" + apply_doc_patch_patch_file: "{{ ansible_user_dir }}/{{ doc.label }}.patch" + apply_doc_patch_target_location: "{{ doc.project_location }}" + loop: "{{ docs }}" + loop_control: + loop_var: "doc" + label: "{{ doc.label }}" + + always: + - name: Revoke GitHub token lease + check_mode: false + no_log: true + delegate_to: localhost + uri: + url: "{{ vault_addr }}/v1/sys/leases/revoke" + headers: + "X-Vault-Token": "{{ lookup('file', vault_token_file) }}" + method: "PUT" + body: + lease_id: "{{ lookup('file', github_token_lease) }}" + body_format: "json" + status_code: 204 diff --git a/playbooks/run.yaml b/playbooks/run.yaml new file mode 100644 index 00000000..9752ddf8 --- /dev/null +++ b/playbooks/run.yaml @@ -0,0 +1,75 @@ +--- +- hosts: all + vars: + docs_rst_location: "docs" + docs_base_location: "base" + docs_new_location: "new" + + tasks: + - name: Read project docs configuration + include_vars: "{{ docs_update_data_file 
}}" + + - name: Detect list of changes + command: "git log -1 --name-only --pretty=" + args: + chdir: "{{ zuul.project.src_dir }}" + register: git_log + ignore_errors: true + + - name: Generate new RSTs + include_role: + name: "convert_doc" + vars: + doc_label: "{{ doc.label }}" + dest: "{{ ansible_user_dir }}/{{ docs_rst_location }}/{{ doc.label }}/{{ docs_new_location }}" + source: "{{ zuul.project.src_dir }}/{{ doc.html_location }}" + loop: "{{ docs }}" + loop_control: + loop_var: "doc" + label: "{{ doc.label }}" + when: "git_log.stdout is search(doc.html_location) or git_log.stdout is search('otc_doc_convertor')" + + - name: Restore to the previous git state + command: "git checkout HEAD^1" + args: + chdir: "{{ zuul.project.src_dir }}" + + - name: Generate base RSTs + include_role: + name: "convert_doc" + vars: + doc_label: "{{ doc.label }}" + dest: "{{ ansible_user_dir }}/{{ docs_rst_location }}/{{ doc.label }}/{{ docs_base_location }}" + source: "{{ zuul.project.src_dir }}/{{ doc.html_location }}" + loop: "{{ docs }}" + loop_control: + loop_var: "doc" + label: "{{ doc.label }}" + when: "git_log.stdout is search(doc.html_location) or git_log.stdout is search('otc_doc_convertor')" + + - name: Generate patch file + include_role: + name: "generate_doc_patch" + vars: + doc_label: "{{ doc.label }}" + generate_doc_patch_base_location: "{{ docs_base_location }}" + generate_doc_patch_new_location: "{{ docs_new_location }}" + generate_doc_patch_base: "{{ ansible_user_dir }}/{{ docs_rst_location }}/{{ doc.label }}" + generate_doc_patch_patch_location: "{{ ansible_user_dir }}/{{ doc.label }}.patch" + loop: "{{ docs }}" + loop_control: + loop_var: "doc" + label: "{{ doc.label }}" + when: "git_log.stdout is search(doc.html_location) or git_log.stdout is search('otc_doc_convertor')" + + - name: Fetch generated artifacts + include_role: + name: "fetch_doc_artifacts" + vars: + fetch_doc_artifacts_name: "{{ doc.label }}" + fetch_doc_artifacts_rst_source: "{{ ansible_user_dir }}/{{ docs_rst_location }}/{{ doc.label }}/{{ docs_new_location }}" + fetch_doc_artifacts_patch: "{{ ansible_user_dir }}/{{ doc.label }}.patch" + loop: "{{ docs }}" + loop_control: + loop_var: "doc" + label: "{{ doc.label }}" diff --git a/playbooks/vars/docs_big-data.yaml b/playbooks/vars/docs_big-data.yaml new file mode 120000 index 00000000..3914d608 --- /dev/null +++ b/playbooks/vars/docs_big-data.yaml @@ -0,0 +1 @@ +../../docs_big-data.yaml \ No newline at end of file diff --git a/playbooks/vars/docs_compute.yaml b/playbooks/vars/docs_compute.yaml new file mode 120000 index 00000000..c09619fe --- /dev/null +++ b/playbooks/vars/docs_compute.yaml @@ -0,0 +1 @@ +../../docs_compute.yaml \ No newline at end of file diff --git a/process.py b/process.py deleted file mode 100644 index 886df989..00000000 --- a/process.py +++ /dev/null @@ -1,310 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import bs4 -import json -import os -import pathlib -import re -import subprocess -import warnings - - -def get_new_name(current_name): - new_name = current_name.replace(' - ', '_') - new_name = new_name.replace(' ', '_') - new_name = new_name.replace('/', '_') - new_name = new_name.replace('\'', '') - new_name = new_name.replace('"', '') - new_name = new_name.replace('`', '') - new_name = new_name.replace('´', '') - new_name = new_name.replace(':', '') - new_name = new_name.replace('?', '') - new_name = new_name.replace('(', '') - new_name = new_name.replace(')', '') - new_name = new_name.lower() - return new_name - - -def get_target_path(code, 
metadata, path=''): - if code in metadata: - current = metadata[code] - if not current.get('p_code'): - return current['new_name'] - else: - return ( - "{0}/{1}".format( - get_target_path(current['p_code'], metadata), - current['new_name']) - ) - else: - return '' - - -def build_doc_tree(metadata): - flat_tree = dict() - for k, v in metadata.items(): - parent_id = v.get('p_code') - if not parent_id: - parent_id = 0 - - if parent_id not in flat_tree: - flat_tree[parent_id] = list() - flat_tree[parent_id].append(v) - return flat_tree - - -def flatten_html(soup, args): - for i in soup.body.find_all('div'): - if "note" in i.get('class', []): - del i['id'] - if i.img: - i.img.decompose() - notetitle = i.find('span', class_='notetitle') - if notetitle: - title = soup.new_tag('div') - title['class'] = 'title' - title.string = 'Note:' - notetitle.replace_with(title) - elif "notice" in i.get('class', []): - del i['id'] - if i.img: - i.img.decompose() - i['class'] = 'important' - elif "caution" in i.get('class', []): - del i['id'] - if i.img: - i.img.decompose() - elif "fignone" in i.get('class', []): - figure = soup.new_tag('figure') - img = i.find('img') - cap = i.find('span', class_='figcap') - if cap is not None: - cap.name = 'figcaption' - figure.append(cap) - if img: - img['src'] = '/_static/images/' + img['src'] - figure.append(img) - i.replace_with(figure) - else: - i.name = 'p' - if args.improve_table_headers: - for th in soup.body.find_all('th'): - if hasattr(th, 'p') and th.p.string: - th.p.string = re.sub( - r'\b/\b', - ' / ', - th.p.string) - for tbl in soup.body.find_all('table'): - tbl_id = tbl.get('id') - if tbl_id: - tbl['id'] = re.sub('[-_]', '', tbl_id) - for lnk in soup.body.find_all("a"): - if ( - lnk.string - and re.match(r'\d+', lnk.string) - and lnk['href'].startswith('#') - ): - lnk.unwrap() - - return soup.body - - -def main(): - parser = argparse.ArgumentParser(description='Process links.') - parser.add_argument( - 'path', type=str, help='path to the files') - parser.add_argument( - '--improve-table-headers', action='store_true', - help='Improve table headers by enforcing spaces around `/`') - args = parser.parse_args() - retval = os.getcwd() - os.chdir(args.path) - meta_data = json.loads(open("CLASS.TXT.json").read()) - metadata_by_uri = dict() - metadata_by_code = dict() - rename_matrix = dict() - table_re = re.compile(r'.*]+ id="([^"]+)"') - for f in meta_data: - f['new_name'] = get_new_name(f['title']) - metadata_by_uri[f['uri']] = f - metadata_by_code[f.get('code')] = f - - tree = build_doc_tree(metadata_by_code) - - for f in meta_data: - # Construct link renaming matrix - target_path = get_target_path(f['p_code'], metadata_by_code) - if target_path: - target_path += '/' - name = f["new_name"] if not tree.get(f['code']) else f"{f['new_name']}/index" - rename_matrix[f['uri']] = f"{target_path}{name}.html" - - pathlib.Path("temp/").mkdir(parents=True, exist_ok=True) - docs_anchors = dict() - - for f in pathlib.Path().glob("*.html"): - if f.name not in metadata_by_uri: - continue - # Registering section links - with open(f, 'r') as reader: - print(f"scanning {f.name}") - content = reader.read() - soup = bs4.BeautifulSoup(content, "lxml") - for lnk in soup.body.find_all('div', class_='section'): - title = lnk.find('h4') - anchor = None - if title.string: - anchor = title.string - elif title.strings: - anchor = ''.join(title.strings) - if anchor: - title = re.sub('[ _:]', '-', anchor) - res = dict( - fname=f.name, - title=title, - replace=title.lower() - ) - 
docs_anchors[lnk.get('id')] = res - - for f in pathlib.Path().glob("*.html"): - if f.name not in metadata_by_uri: - continue - _target = metadata_by_uri[f.name] - target = _target['new_name'] - target_path = get_target_path(_target['p_code'], metadata_by_code) - target_deepness = target_path.count('/') + 1 - if not _target['p_code']: - # we only +1 if we are not on the same level - target_deepness = 0 - pathlib.Path("temp/").mkdir(parents=True, exist_ok=True) - pathlib.Path("tmp_result/" + target_path).mkdir( - parents=True, exist_ok=True) - pathlib.Path("result/" + target_path).mkdir( - parents=True, exist_ok=True) - - # Pre-processing of html content - with open(f, 'r') as reader, open(f"temp/{target}.tmp", 'w') as writer: - print(f"Processing {target}") - doc_anchors = dict() - content = reader.read() - soup = bs4.BeautifulSoup(content, "lxml") - proc = flatten_html(soup, args) - # Fix cross links - for lnk in proc.find_all("a"): - href = lnk.get('href') - if href: - # Drop anchor links to "Figure"s - if ( - (lnk.content and lnk.content.startswith('Figure')) - or - ( - lnk.contents - and ''.join(lnk.contents).startswith('Figure') - ) - ): - lnk.unwrap() - else: - page_url = '' - anchor = '' - href_parts = href.split('#') - if href_parts[0] in rename_matrix: - page_url = ('../' * target_deepness) + \ - rename_matrix[href_parts[0]] - else: - page_url = href_parts[0] - if len(href_parts) > 1: - anchor = href_parts[1] - if anchor in docs_anchors: - anchor = docs_anchors[anchor]['replace'] - else: - anchor = re.sub('[-_]', '', anchor).lower() - lnk['href'] = f"{page_url}#{anchor}" - else: - lnk['href'] = lnk['href'].replace( - href_parts[0], - page_url) - if not href: - lnk_name = lnk.get('name') - if ( - lnk_name and not lnk.string - and lnk_name not in doc_anchors - ): - lnk['id'] = lnk_name - doc_anchors[lnk_name] = 1 - - for line in str(proc).splitlines(): - table_match = table_re.match(line) - if table_match: - writer.write(f".. _{table_match.group(1)}:\n\n") - if not line.startswith("Parent topic:"): - processed_line = line - writer.write(processed_line + '\n') - # Convert html to rst - os.system( - f"pandoc 'temp/{target}.tmp' -f html " - f"-o 'tmp_result/{target_path}/{target}.rst' " - f"--ascii -s --wrap none" - ) - # Post processing of rendered rst - with ( - open(f"tmp_result/{target_path}/{target}.rst", 'r') as reader, - open(f"result/{target_path}/{target}.rst", 'w') as writer - ): - print(f"Post processing {target}") - for line in reader.readlines(): - processed_line = re.sub(r'\.\. \\_', '\n\n.. _', line) - processed_line = re.sub(r'√', 'Y', processed_line) - processed_line = re.sub( - r'public_sys-resources/', '', processed_line) - processed_line = re.sub( - r'image:: ', 'image:: /_static/images/', processed_line) - processed_line = re.sub( - r' :name: .*$', '', processed_line) - processed_line = re.sub( - r'\*\*Parent topic:.*$', '', processed_line) - processed_line = re.sub( - r'.. code:: screen$', - r'.. code-block::', processed_line) - # spaces are important, since code-block may be inside of the - # cell - processed_line = re.sub( - r'.. code:: screen\s', - r'.. code-block:: ', processed_line) - processed_line = re.sub( - r'.. code:: codeblock$', - r'.. 
code-block::', processed_line)
- writer.write(processed_line)
- # Generate indexes
- for k, v in tree.items():
- path = ''
- title = 'Main Index'
- if k != 0:
- curr = metadata_by_code[k]
- title = curr['title']
- path = get_target_path(curr['code'], metadata_by_code)
- with open(f"result/{path}/index.rst", "w") as index:
- index.write('='*(len(title)) + '\n')
- index.write(title + '\n')
- index.write('='*(len(title)) + '\n')
- index.write('\n')
- index.write('.. toctree::\n')
- index.write(' :maxdepth: 1\n\n')
- for child in v:
- new_name = child['new_name']
- if child['code'] in tree:
- # If this is folder - add /index
- new_name = new_name + '/index'
- index.write(f" {new_name}\n")
-
- p = pathlib.Path(f"result/{path}.rst")
- if p.exists():
- print(f"{p.resolve()} is removed in favour"
- f" of result/{path}/index.rst")
- p.unlink()
-
- os.chdir(retval)
-
-
-if __name__ == "__main__":
- main()
diff --git a/process_links.py b/process_links.py
deleted file mode 100644
index 3ab1e228..00000000
--- a/process_links.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python3
-
-import json
-import argparse
-import subprocess
-
-
-def main():
- parser = argparse.ArgumentParser(description='Process links.')
- parser.add_argument(
- 'path', type=str, help='path to the files')
- args = parser.parse_args()
- matrix = json.loads(open("matrix.json").read())
- for k, v in matrix.items():
- replace = v.replace('/', '\/')
- subprocess.run(
- f"find {args.path} -name *'.rst' -type f -print0 | xargs"
- f" -0 sed -i '' 's/{k}/{replace}/g'",
- shell=True
- )
- print(k, v)
-
-
-if __name__ == "__main__":
- main()
diff --git a/roles/apply_doc_patch/defaults/main.yaml b/roles/apply_doc_patch/defaults/main.yaml
new file mode 100644
index 00000000..2f9fd362
--- /dev/null
+++ b/roles/apply_doc_patch/defaults/main.yaml
@@ -0,0 +1,6 @@
+apply_doc_patch_gh: "/usr/local/bin/gh"
+apply_doc_patch_branch_name: "import-{{ doc_label }}-{{ zuul.change }}"
+apply_doc_patch_pr_title: |
+ Changes to {{ doc_label }} from doc-exports#{{ zuul.change }}
+apply_doc_patch_pr_body_file: "{{ ansible_user_dir }}/{{ doc_label }}_pr_body.txt"
+apply_doc_patch_pr_label: "autoPR"
diff --git a/roles/apply_doc_patch/tasks/main.yaml b/roles/apply_doc_patch/tasks/main.yaml
new file mode 100644
index 00000000..ef60492b
--- /dev/null
+++ b/roles/apply_doc_patch/tasks/main.yaml
@@ -0,0 +1,111 @@
+---
+- name: Check patch presence
+ ansible.builtin.stat:
+ path: "{{ apply_doc_patch_patch_file }}"
+ register: "patch_stat"
+
+- name: Clone Repository
+ no_log: true
+ command: "git clone https://x-access-token:{{ github_token }}@github.com/{{ apply_doc_patch_repository }} {{ doc_label }}"
+ args:
+ chdir: "{{ ansible_user_dir }}"
+ when: "patch_stat.stat.exists"
+
+- name: Try to apply patch
+ command: "patch -p1 -N -f -i {{ apply_doc_patch_patch_file }}"
+ args:
+ chdir: "{{ ansible_user_dir }}/{{ doc_label }}/{{ apply_doc_patch_target_location }}"
+ ignore_errors: true
+ when: "patch_stat.stat.exists"
+
+- name: Find patch reject files
+ find:
+ paths: "{{ ansible_user_dir }}/{{ doc_label }}/{{ apply_doc_patch_target_location }}"
+ patterns: "^.*?\\.(?:rej)$"
+ use_regex: true
+ recurse: true
+ register: rej_files
+ when: "patch_stat.stat.exists"
+
+- name: Drop all interim patch files
+ command: "find . 
\\( -name '*.rst.orig' -or -name '*.rst.rej' \\) -exec rm {} \\;" + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}/{{ apply_doc_patch_target_location }}" + register: drop_failed + when: "patch_stat.stat.exists" + +- name: Perform git diff to see changes + command: "git diff" + register: diff_output + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}/{{ apply_doc_patch_target_location }}" + when: "patch_stat.stat.exists" + +- name: Checkout branch + command: "git checkout -b {{ apply_doc_patch_branch_name }}" + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + +- name: Stage files + command: "git add ." + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + +# Maybe put some reference in the commit message +- name: Commit files + command: "git commit -m 'Update content'" + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + +- name: Push changes + command: "git push -u origin {{ apply_doc_patch_branch_name }} --force" + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + +# Gists are not available by GitHub App, do not try it now +# - name: Create patch gist +# command: "{{ apply_doc_patch_gh }} gist create {{ apply_doc_patch_patch_file }} -d '{{ doc_label }} patch file for opentelekomcloud-docs/doc-exports#{{ zuul.change }}'" +# register: gh_gist +# args: +# chdir: "{{ ansible_user_dir }}/{{ doc_label }}" +# when: +# - "patch_stat.stat.exists" +# - "diff_output.stdout != ''" + +- name: Generate PR text + template: + dest: "{{ apply_doc_patch_pr_body_file }}" + src: "pr_body.txt.j2" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + +- name: Search existing Pull Request + command: "{{ apply_doc_patch_gh }} pr list --search 'head:{{ apply_doc_patch_branch_name }}' --json id" + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}" + register: "existing_pr" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + +- name: Open Pull Request + command: "{{ apply_doc_patch_gh }} pr create -B main -F {{ apply_doc_patch_pr_body_file }} -t '{{ apply_doc_patch_pr_title }}' -l {{ apply_doc_patch_pr_label }}" + args: + chdir: "{{ ansible_user_dir }}/{{ doc_label }}" + when: + - "patch_stat.stat.exists" + - "diff_output.stdout != ''" + - "existing_pr.stdout == '[]'" diff --git a/roles/apply_doc_patch/templates/pr_body.txt.j2 b/roles/apply_doc_patch/templates/pr_body.txt.j2 new file mode 100644 index 00000000..e1b36630 --- /dev/null +++ b/roles/apply_doc_patch/templates/pr_body.txt.j2 @@ -0,0 +1,9 @@ +This is an automatically created Pull Request for changes to {{ doc_label }} in opentelekomcloud-docs/doc-exports#{{ zuul.change }}. + +Please do not edit it manually, since update to the original PR will overwrite local changes. + +Original patch file, as well as complete rst archive, can be found in the artifacts of the opentelekomcloud-docs/doc-exports#{{ zuul.change }} + +{% if rej_files.matched > 0 %} +There were patch conflicts, please review original patch file. 
+{% endif %} diff --git a/roles/convert_doc/defaults/main.yaml b/roles/convert_doc/defaults/main.yaml new file mode 100644 index 00000000..844e92e4 --- /dev/null +++ b/roles/convert_doc/defaults/main.yaml @@ -0,0 +1 @@ +zuul_work_virtualenv: "{{ ansible_user_dir }}/.venv" diff --git a/roles/convert_doc/tasks/main.yaml b/roles/convert_doc/tasks/main.yaml new file mode 100644 index 00000000..18292551 --- /dev/null +++ b/roles/convert_doc/tasks/main.yaml @@ -0,0 +1,4 @@ +- name: Convert {{ doc_label | default('') }} HTML to RST + args: + executable: "/bin/bash" + shell: "source {{ zuul_work_virtualenv }}/bin/activate; otc-convert-doc --dest {{ dest }} {{ source }}" diff --git a/roles/fetch_doc_artifacts/defaults/main.yaml b/roles/fetch_doc_artifacts/defaults/main.yaml new file mode 100644 index 00000000..7cc13448 --- /dev/null +++ b/roles/fetch_doc_artifacts/defaults/main.yaml @@ -0,0 +1,3 @@ +zuul_executor_dest: "{{ zuul.executor.log_root }}" +zuul_output_dir: "{{ ansible_user_dir }}/zuul-output" +zuul_use_fetch_output: "{{ zuul_site_use_fetch_output|default(false) }}" diff --git a/roles/fetch_doc_artifacts/tasks/doc.yaml b/roles/fetch_doc_artifacts/tasks/doc.yaml new file mode 100644 index 00000000..90761ae0 --- /dev/null +++ b/roles/fetch_doc_artifacts/tasks/doc.yaml @@ -0,0 +1,43 @@ +- name: Create temporary archive file + tempfile: + state: file + suffix: ".tar.gz" + register: doc_archive + +- name: Archive Doc + command: "tar -f {{ doc_archive.path }} -C {{ fetch_doc_artifacts_rst_source }} -cz ." + args: + warn: false + +- block: + - name: Fetch archive + synchronize: + dest: "{{ zuul.executor.log_root }}/{{ fetch_doc_artifacts_name }}.tar.gz" + mode: pull + src: "{{ doc_archive.path }}" + verify_host: true + owner: no + group: no + when: not zuul_use_fetch_output + +- block: + - name: Copy archive + copy: + dest: "{{ zuul_output_dir }}/logs/{{ fetch_doc_artifacts_name }}.tar.gz" + src: "{{ doc_archive.path }}" + mode: 0644 + remote_src: true + when: zuul_use_fetch_output + +- name: Return artifact to Zuul + zuul_return: + data: + zuul: + artifacts: + - name: "{{ fetch_doc_artifacts_name }} archive" + url: "{{ fetch_doc_artifacts_name }}.tar.gz" + metadata: + type: docs_archive + doc_service: "{{ doc.service }}" + doc_type: "{{ doc.type }}" + doc_label: "{{ fetch_doc_artifacts_name }}" diff --git a/roles/fetch_doc_artifacts/tasks/main.yaml b/roles/fetch_doc_artifacts/tasks/main.yaml new file mode 100644 index 00000000..6b7377fd --- /dev/null +++ b/roles/fetch_doc_artifacts/tasks/main.yaml @@ -0,0 +1,19 @@ +- name: Inspect {{ fetch_doc_artifacts_name }} build directory + find: + file_type: any + follow: true + paths: "{{ fetch_doc_artifacts_rst_source }}" + register: doc_dir + +- name: Process {{ fetch_doc_artifacts_name }} archive + when: "doc_dir.matched > 0" + include_tasks: doc.yaml + +- name: Find {{ fetch_doc_artifacts_name }} Patch + stat: + path: "{{ fetch_doc_artifacts_patch }}" + register: doc_patch + +- name: Process {{ fetch_doc_artifacts_name }} Patch file + when: "doc_patch.stat.exists" + include_tasks: patch.yaml diff --git a/roles/fetch_doc_artifacts/tasks/patch.yaml b/roles/fetch_doc_artifacts/tasks/patch.yaml new file mode 100644 index 00000000..c9bd5208 --- /dev/null +++ b/roles/fetch_doc_artifacts/tasks/patch.yaml @@ -0,0 +1,31 @@ +- block: + - name: Fetch archive + synchronize: + dest: "{{ zuul.executor.log_root }}/{{ fetch_doc_artifacts_name }}.patch" + mode: pull + src: "{{ fetch_doc_artifacts_patch }}" + verify_host: true + owner: no + group: no + when: not 
zuul_use_fetch_output
+
+- block:
+ - name: Copy archive
+ copy:
+ dest: "{{ zuul_output_dir }}/logs/{{ fetch_doc_artifacts_name }}.patch"
+ src: "{{ fetch_doc_artifacts_patch }}"
+ mode: 0644
+ remote_src: true
+ when: zuul_use_fetch_output
+
+- name: Return artifact to Zuul
+ zuul_return:
+ data:
+ zuul:
+ artifacts:
+ - name: "{{ fetch_doc_artifacts_name }} patch"
+ url: "{{ fetch_doc_artifacts_name }}.patch"
+ metadata:
+ type: docs_patch
+ doc_service: "{{ doc.service }}"
+ doc_type: "{{ doc.type }}"
diff --git a/roles/generate_doc_patch/tasks/main.yaml b/roles/generate_doc_patch/tasks/main.yaml
new file mode 100644
index 00000000..87497bdf
--- /dev/null
+++ b/roles/generate_doc_patch/tasks/main.yaml
@@ -0,0 +1,21 @@
+---
+- name: Check base state for {{ doc_label }}
+ stat:
+ path: "{{ generate_doc_patch_base }}/{{ generate_doc_patch_base_location }}"
+ register: base_stat
+
+- name: Check new state for {{ doc_label }}
+ stat:
+ path: "{{ generate_doc_patch_base }}/{{ generate_doc_patch_new_location }}"
+ register: new_stat
+
+- name: Generate {{ doc_label }} patch file
+ shell: "diff -ruN {{ generate_doc_patch_base_location }} {{ generate_doc_patch_new_location }} > {{ generate_doc_patch_patch_location }}"
+ args:
+ chdir: "{{ generate_doc_patch_base }}"
+ register: diff_cmd
+ failed_when: "diff_cmd.rc >= 2"
+ changed_when: "diff_cmd.rc == 1"
+ when:
+ - "base_stat.stat.exists"
+ - "new_stat.stat.exists"
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 00000000..07f209c7
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,40 @@
+[tox]
+minversion = 3.6
+envlist = pep8
+skipsdist = True
+ignore_basepython_conflict = True
+
+[testenv]
+usedevelop = True
+install_command = pip install {opts} {packages}
+setenv =
+ VIRTUAL_ENV={envdir}
+ LANG=en_US.UTF-8
+ LANGUAGE=en_US:en
+ LC_ALL=C
+deps =
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+
+[testenv:pep8]
+commands =
+ flake8
+
+[testenv:venv]
+deps =
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+commands = {posargs}
+
+[flake8]
+# The following are ignored on purpose. It's not super worth it to fix them.
+# However, if you feel strongly about it, patches will be accepted to fix them
+# if they fix ALL of the occurrences of one and only one of them.
+# H306 Is about alphabetical imports - there's a lot to fix.
+# H4 Are about docstrings and there's just a huge pile of pre-existing issues.
+# W503 Is supposed to be off by default but in the latest pycodestyle isn't.
+# Also, both openstacksdk and Donald Knuth disagree with the rule. Line
+# breaks should occur before the binary operator for readability.
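+#
+# Typical local usage (assuming tox itself is installed):
+#   tox -e pep8                          # run these flake8 checks
+#   tox -e venv -- otc-convert-doc --dest /tmp/rst docs/ecs/umn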
+ignore = H306,H4,W503 +show-source = True +exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build diff --git a/zuul.yaml b/zuul.yaml index 51b13832..e12683b2 100644 --- a/zuul.yaml +++ b/zuul.yaml @@ -1,9 +1,110 @@ +- secret: + name: zuul_eco_doc_exports_vault + data: + vault_role_id: !encrypted/pkcs1-oaep + - i+G23kbxo4w3iA7zeqcD4ZeEXkQfzfQZ9AZJemKMkhM1n2gHM35kA7OBCAwKurhTsEbVv + xtTgjW3S/fnhA45xc29DeAHluGh7SSY5GoVbyZG9aXrHOt7wMTxGBH1hzZcbA2Yts6IcV + kIdyrcN8/LQUNcDX1KNVQk22//MRlB+c81sOsByJtKIwbsuukU0PCpgnaaDh+VptpYOdU + YlbUN15q049fG9MdXY9NQuZWuL+w2bG/7jE9AvIVQbJFQHcTmY1fuy+fM9uSi5D/FH9sA + NXse3y7ncGbe+ps1wuagyMbWA0SI/SuoEABG0ePQ3QCSmZexyK7UuxxfWTVa7PwGEJDlr + bFQiHEz2DlufgY0LloSK8X0gu0dBo/AmFtoZASHRvu/8tcD16cGlLSK/IsoHa8NB8bCM3 + lyd95WLuFVEdytc7PxK0ccum/9MMNLcRXB9Tmd34wiZ19EF891J/YJIaTo/baurZ+87KY + lMuMp2P+PFM50LhZzsBD6kv2P4eKMYPsYpv2UVDw9xrOTgJJ4131r+MpUKADwdeFGa6ux + ovDK0GnZXCZ47MDbkY5esaLaCiIDKpJuiRp6rUKSxne5hYp9S4YyRGswjNHXGcWQatLeS + qkcsZ1nhfKnV0cTf1+6vCh8q09zW7v4IxQ8pj1O+bzNJ2hPuUJEnT1XIs5LZUg= + vault_token_path: !encrypted/pkcs1-oaep + - OIGL6UXXZuoFKe+v9u/53hmZkBOo+FYLiz16guzcVJ8MupWe6PtZ9LOggO8yDmNpzs6tQ + hQKdsnTJa78nXgZq25V/1wKVV/yz/JSPW84HAWllwN/+n9zqTDJuXDWJwaivGrBxCXLnD + PLAXauwB9KGfDSuWKtm8JEFPZvXrFSF8GQXO/hRhL4N1Vxf5cyNZ+nEiWCfJKx0ajXwW3 + X9LitwfOt4greHs+QAxUQHhaOPqEfPziIriKx9/aHVvKkCBPIXmxXcOKt8s6vSGbLDKy+ + MxxaE7FXH7fHlW4mhnwqDBUM6kRVOunR/iWrBrSspZp1DTsVxSunHxH5Cw8apveVyeZoL + Y6JsQAi4m4Oy+eybpcAXUOTmEeSgydGa0Vm0s3t96N2iiEnhT/MnAk2N5xNwO1CVr7Spn + XIZicW4xldAFiy7GKDfNGAgPiqzcpRL4QtzePTj1U5tB7LV9Mg25T3Z4IwxA9NiIM7mtF + 1qnH9ebWo88ddnR0wzCcEriNVHkS47vpY971kPCFPIlwHNobSA01VQiyDS/s7MgoEev+L + R6XGpSbLiduBo4kjiXen2RNeNBC+8QyYANKeaJs82py2H48dPQTpBt3gJbMPy9EqgO+qU + PHJ07yaa4zZsTOnG0e6iSWOC9nt7TOJVAmzwipXX33brJ7F8XxP3k1Tui6gFbk= + +- job: + name: otc-doc-exports-convert-base + parent: unittests + abstract: true + nodeset: ubuntu-focal + description: | + Convert doc exports from html to rst and generate corresponding rst diff + files. + pre-run: playbooks/pre.yaml + run: playbooks/run.yaml + +- job: + name: otc-doc-exports-convert-compute + parent: otc-doc-exports-convert-base + description: | + Convert Compute doc exports from html to rst and generate + corresponding rst patch files. + files: + - otc_doc_convertor + - docs/ecs + vars: + docs_update_data_file: "docs_compute.yaml" + +- job: + name: otc-doc-exports-convert-big-data + parent: otc-doc-exports-convert-base + description: | + Convert BigData doc exports from html to rst and generate + corresponding rst patch files. + files: + - otc_doc_convertor + - docs/modelarts + vars: + docs_update_data_file: "docs_big-data.yaml" + +- job: + name: otc-doc-exports-propose-update-compute + parent: otc-doc-exports-convert-compute + description: | + Propose Compute changes to the final projects + files. + post-run: playbooks/propose_update.yaml + files: + - otc_doc_convertor + - docs/ecs + secrets: + - secret: zuul_eco_doc_exports_vault + name: zuul_vault + pass-to-parent: true + +- job: + name: otc-doc-exports-propose-update-big-data + parent: otc-doc-exports-convert-big-data + description: | + Propose BigData changes to the final projects + files. 
+ post-run: playbooks/propose_update.yaml + files: + - otc_doc_convertor + - docs/modelarts + secrets: + - secret: zuul_eco_doc_exports_vault + name: zuul_vault + pass-to-parent: true + - project: merge-mode: squash-merge default-branch: main check: jobs: - - noop + - otc-tox-pep8: + nodeset: ubuntu-focal + - otc-doc-exports-convert-compute + - otc-doc-exports-convert-big-data + check-post: + jobs: + - otc-doc-exports-propose-update-compute + - otc-doc-exports-propose-update-big-data gate: jobs: - - noop + - otc-tox-pep8: + nodeset: ubuntu-focal + - otc-doc-exports-convert-compute + - otc-doc-exports-convert-big-data
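
For reference, the apply_doc_patch flow added above reduces to roughly
the following shell sequence (a sketch only: the ecs_umn label, the
target repository and the PR number 42 are illustrative, and
GITHUB_TOKEN is the short-lived token issued via Vault):

    git clone "https://x-access-token:${GITHUB_TOKEN}@github.com/opentelekomcloud-docs/elastic-cloud-server" ecs_umn
    cd ecs_umn/umn/source
    patch -p1 -N -f -i ~/ecs_umn.patch || true   # force-apply, skipping conflicts
    find . \( -name '*.rst.orig' -or -name '*.rst.rej' \) -exec rm {} \;
    cd ../..
    git checkout -b import-ecs_umn-42
    git add .
    git commit -m 'Update content'
    git push -u origin import-ecs_umn-42 --force
    gh pr create -B main -F ~/ecs_umn_pr_body.txt -t 'Changes to ecs_umn from doc-exports#42' -l autoPR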