4 Commits

Author SHA1 Message Date
114cf56718 update runner on merge
All checks were successful
gl/check check status: success (114cf567183364ee6c5a02211c95dbab61b6f70e)
gl/gate gate status: success (114cf567183364ee6c5a02211c95dbab61b6f70e)
Updates Opensearch filters / update-opensearch-filters (pull_request) Successful in 10s
2025-07-29 09:29:33 +00:00
656b705bd5 rename runner
All checks were successful
Updates Opensearch filters / update-opensearch-filters (pull_request) Successful in 1m6s
gl/check check status: success (656b705bd5b03a5bf07425f00d19b9d2c44d1c60)
2025-07-29 09:25:32 +00:00
6a36d2e291 fix issue
Some checks failed
gl/check check status: pending (6a36d2e291a4e5ac288e9911baab6ade06f83c0c)
Updates Opensearch filters / update-opensearch-filters (pull_request) Has been cancelled
2025-07-29 09:23:16 +00:00
b716b40949 add opensearch update workflow
Some checks failed
gl/check check status: success (b716b40949a999bcf6651ff6805ee572b7e42984)
Updates Opensearch filters / update-opensearch-filters (pull_request) Has been cancelled
2025-07-29 09:14:24 +00:00
6 changed files with 0 additions and 314 deletions

View File

@@ -1,62 +0,0 @@
---
# Weekly analytics collection: queries Umami page statistics for both
# cloud environments and pushes the results to the analytics-update branch.
name: Create Weekly Analytics Stats

on:
  schedule:
    # 03:00 UTC = 04:00 CET
    - cron: "0 3 * * 1"
  # Allow manual runs from the UI.
  workflow_dispatch:

jobs:
  run-analytics:
    runs-on: ubuntu
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Token with push rights so the results can be pushed back.
          token: ${{ secrets.PUSH_TOKEN }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests otc-metadata

      - name: Run analytics for eu_de
        env:
          UMAMI_USERNAME: ${{ secrets.UMAMI_USERNAME }}
          UMAMI_PASSWORD: ${{ secrets.UMAMI_PASSWORD }}
        run: |
          python ./tools/collect_statistics.py \
            --website-id "${{ secrets.UMAMI_WEBSITE_ID }}" \
            --cloud-environment "eu_de" \
            --environment "public" \
            --limit "10"

      - name: Run analytics for swiss
        env:
          UMAMI_USERNAME: ${{ secrets.UMAMI_USERNAME }}
          UMAMI_PASSWORD: ${{ secrets.UMAMI_PASSWORD }}
        run: |
          python ./tools/collect_statistics.py \
            --website-id "${{ secrets.UMAMI_WEBSITE_ID }}" \
            --cloud-environment "swiss" \
            --environment "public" \
            --limit "10"

      - name: Commit and push results
        id: commit_step
        run: |
          git config --global user.name "gitea-actions[bot]"
          git config --global user.email "actions@users.noreply.local"
          git checkout -B analytics-update
          git add analytics/
          # FIX: the original had an empty `then` branch, which is a shell
          # syntax error and would fail the step. Report the no-op instead.
          if git diff --cached --quiet; then
            echo "No changes to commit"
          else
            git commit -m "chore: update analytics data"
            git push origin analytics-update --force
          fi

View File

@@ -1,18 +0,0 @@
---
# Pull-request gate: runs unit tests and pep8 style checks through the
# shared tox composite action from opentelekomcloud-infra/github-actions.
name: Run Tox Check

on:
  pull_request:
    types: [opened, reopened, synchronize, edited]

jobs:
  # Unit tests via the default tox environment.
  tox-py312:
    runs-on: ubuntu
    steps:
      - uses: https://github.com/opentelekomcloud-infra/github-actions/.github/actions/tox-py-test@v1

  # Style checks via the pep8 tox environment.
  tox-pep8:
    runs-on: ubuntu
    steps:
      - uses: https://github.com/opentelekomcloud-infra/github-actions/.github/actions/tox-py-test@v1
        with:
          tox-env: pep8

View File

@@ -1,12 +0,0 @@
[
"evs",
"ims",
"ecs",
"cce",
"obs",
"rds",
"sfs",
"iam",
"config",
"elb"
]

View File

@@ -1,12 +0,0 @@
[
"evs",
"ims",
"ecs",
"cce",
"obs",
"rds",
"iam",
"elb",
"cbr",
"vpn"
]

View File

@@ -453,25 +453,3 @@ class Services(object):
res.sort(key=lambda x: x.get("name", "").lower())
return res
def all_services_by_cloud_environment(self, cloud_environment, environments):
    """Retrieve all services available in a given cloud environment.

    :param cloud_environment: Name of the cloud environment, matched
        against each entry of a service's ``cloud_environments`` list.
    :param environments: Iterable of visibility values (e.g. ``["public"]``)
        matched against each entry's ``visibility`` field.
    :returns: List of matching service dicts, sorted case-insensitively
        by ``service_title``.
    :raises Exception: If either argument is empty/falsy.
    """
    # Validate arguments once, up front. The original check lived inside
    # the per-service loop, so an empty service catalogue silently skipped
    # validation and returned [] instead of raising.
    if not (cloud_environment and environments):
        raise Exception("No cloud_environment or environments "
                        "specified in function all_services_by_cloud_environment.")
    res = []
    for srv in self.all_services:
        for srv_cloud_environment in srv["cloud_environments"]:
            # Membership test instead of an inner loop: also prevents
            # duplicate appends when `environments` contains repeats.
            if (srv_cloud_environment["name"] == cloud_environment
                    and srv_cloud_environment["visibility"] in environments):
                res.append(srv)
    # Sort services by display title, case-insensitively.
    res.sort(key=lambda x: x.get("service_title", "").lower())
    return res

File diff suppressed because it is too large Load Diff