4 Commits

114cf56718  update runner on merge  2025-07-29 09:29:33 +00:00
  All checks were successful:
    gl/check: success
    gl/gate: success
    Updates Opensearch filters / update-opensearch-filters (pull_request): successful in 10s

656b705bd5  rename runner  2025-07-29 09:25:32 +00:00
  All checks were successful:
    gl/check: success
    Updates Opensearch filters / update-opensearch-filters (pull_request): successful in 1m6s

6a36d2e291  fix issue  2025-07-29 09:23:16 +00:00
  Some checks did not complete:
    gl/check: pending
    Updates Opensearch filters / update-opensearch-filters (pull_request): cancelled

b716b40949  add opensearch update workflow  2025-07-29 09:14:24 +00:00
  Some checks did not complete:
    gl/check: success
    Updates Opensearch filters / update-opensearch-filters (pull_request): cancelled
8 changed files with 0 additions and 392 deletions

View File

@@ -1,62 +0,0 @@
name: Create Weekly Analytics Stats

on:
  schedule:
    # 03:00 UTC = 04:00 CET
    - cron: "0 3 * * 1"
  workflow_dispatch:

jobs:
  run-analytics:
    runs-on: ubuntu
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.PUSH_TOKEN }}
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests otc-metadata
      - name: Run analytics for eu_de
        env:
          UMAMI_USERNAME: ${{ secrets.UMAMI_USERNAME }}
          UMAMI_PASSWORD: ${{ secrets.UMAMI_PASSWORD }}
        run: |
          python ./tools/collect_statistics.py \
            --website-id "${{ secrets.UMAMI_WEBSITE_ID }}" \
            --cloud-environment "eu_de" \
            --environment "public" \
            --limit "10"
      - name: Run analytics for swiss
        env:
          UMAMI_USERNAME: ${{ secrets.UMAMI_USERNAME }}
          UMAMI_PASSWORD: ${{ secrets.UMAMI_PASSWORD }}
        run: |
          python ./tools/collect_statistics.py \
            --website-id "${{ secrets.UMAMI_WEBSITE_ID }}" \
            --cloud-environment "swiss" \
            --environment "public" \
            --limit "10"
      - name: Commit and push results
        run: |
          git config --global user.name "gitea-actions[bot]"
          git config --global user.email "actions@users.noreply.local"
          git checkout -B analytics-update
          git add otc_metadata/analytics/
          if git diff --cached --quiet; then
            echo "No changes to commit"
          else
            git commit -m "chore: update analytics data [skip ci]"
            git push origin analytics-update --force
          fi
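
The deleted workflow above drives ./tools/collect_statistics.py entirely through four CLI flags, with the Umami credentials passed via the environment. The script itself is outside this diff; purely as a hedged sketch of the calling convention it would have to accept (the argument names come from the workflow invocation, everything inside main() is a placeholder, not the real implementation):

import argparse
import os


def main():
    # Flags mirror the invocation in the deleted workflow above.
    parser = argparse.ArgumentParser(description="Collect Umami analytics stats")
    parser.add_argument("--website-id", required=True)
    parser.add_argument("--cloud-environment", choices=["eu_de", "swiss"], required=True)
    parser.add_argument("--environment", default="public")
    parser.add_argument("--limit", type=int, default=10)
    args = parser.parse_args()

    # Credentials come from the environment, as in the workflow's env: block.
    username = os.environ["UMAMI_USERNAME"]
    password = os.environ["UMAMI_PASSWORD"]

    # Placeholder: the real script presumably authenticates against the
    # Umami API and writes JSON into otc_metadata/analytics/public/.
    print(f"Would collect top {args.limit} pages for {args.cloud_environment} "
          f"(website {args.website_id}, env {args.environment}) as {username}")
    _ = password  # unused in this sketch


if __name__ == "__main__":
    main()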

View File

@@ -1,18 +0,0 @@
name: Run Tox Check

on:
  pull_request:
    types: [opened, reopened, synchronize, edited]

jobs:
  tox-py312:
    runs-on: ubuntu
    steps:
      - uses: https://github.com/opentelekomcloud-infra/github-actions/.github/actions/tox-py-test@v1
  tox-pep8:
    runs-on: ubuntu
    steps:
      - uses: https://github.com/opentelekomcloud-infra/github-actions/.github/actions/tox-py-test@v1
        with:
          tox-env: pep8

View File

@@ -1,48 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pathlib import Path
import json

BASE_DIR = Path(__file__).resolve().parent
analytics_path = BASE_DIR / "public"

cloud_environments = [
    'eu_de',
    'swiss'
]

analytics_data = {k: [] for k in cloud_environments}

# Open and read the json data files
for env in cloud_environments:
    file_path = analytics_path / f"{env}.json"
    with file_path.open(encoding="utf-8") as file:
        analytics_data[env] = json.load(file)


class AnalyticsData(object):
    """Encapsulate OTC Analytics data"""

    def __init__(self):
        self._analytics_data = analytics_data

    def all_analytics_data(self):
        """Returns all analytics data"""
        return self._analytics_data

    def analytics_data_by_cloud_environment(self, cloud_environment):
        """Returns analytics data for the given cloud_environment"""
        if cloud_environment and cloud_environment in self._analytics_data:
            return self._analytics_data[cloud_environment]
        else:
            raise ValueError(
                f"cloud_environment '{cloud_environment}' does not exist.")
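
For orientation, a short usage sketch of the class removed above; the import path is an assumption based on the otc_metadata/analytics/ directory referenced in the workflow's commit step:

# Import path assumed from the otc_metadata/analytics/ layout; adjust as needed.
from otc_metadata.analytics import AnalyticsData

data = AnalyticsData()

# Everything at once: {'eu_de': [...], 'swiss': [...]}
all_stats = data.all_analytics_data()

# One environment; an unknown name raises ValueError.
eu_de_stats = data.analytics_data_by_cloud_environment("eu_de")
try:
    data.analytics_data_by_cloud_environment("unknown")
except ValueError as err:
    print(err)  # cloud_environment 'unknown' does not exist.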

View File

@@ -1,12 +0,0 @@
[
"evs",
"ims",
"ecs",
"cce",
"obs",
"rds",
"sfs",
"iam",
"elb",
"vpn"
]

View File

@@ -1,12 +0,0 @@
[
"evs",
"ims",
"ecs",
"cce",
"obs",
"rds",
"iam",
"elb",
"vpn",
"cbr"
]
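
The two deleted JSON lists (one per cloud environment, matching the eu_de/swiss pair loaded by the module above; the capture does not show which file is which) differ by exactly one service each, which a set difference makes obvious:

# Entries copied from the two deleted files above; which list belongs to
# which cloud environment is not visible in this capture.
first_list = {"evs", "ims", "ecs", "cce", "obs", "rds", "sfs", "iam", "elb", "vpn"}
second_list = {"evs", "ims", "ecs", "cce", "obs", "rds", "iam", "elb", "vpn", "cbr"}

print(first_list - second_list)   # {'sfs'}
print(second_list - first_list)   # {'cbr'}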

View File

@@ -453,55 +453,3 @@ class Services(object):
        res.sort(key=lambda x: x.get("name", "").lower())
        return res

    def all_services_by_cloud_environment(self, cloud_environment, environments):
        """Retrieve all services filtered by cloud_environment
        """
        res = []
        for srv in self.all_services:
            if environments and cloud_environment:
                for srv_cloud_environment in srv["cloud_environments"]:
                    if srv_cloud_environment["name"] == cloud_environment:
                        for environment in environments:
                            if srv_cloud_environment["visibility"] == environment:
                                res.append(srv)
                            else:
                                continue
            else:
                raise Exception("No cloud_environment or environments "
                                "specified in function all_services_by_cloud_environment.")
        # Sort services
        res.sort(key=lambda x: x.get("service_title", "").lower())
        return res

    def all_services_by_cloud_environment_as_dict(self, cloud_environment, environments):
        """Retrieve all services filtered by cloud_environment

        Returns a dict keyed by service_type.
        """
        res = {}
        if not (environments and cloud_environment):
            raise Exception(
                "No cloud_environment or environments specified in function all_services_by_cloud_environment."
            )
        for srv in self.all_services:
            for srv_cloud_environment in srv.get("cloud_environments", []):
                if srv_cloud_environment.get("name") == cloud_environment:
                    for environment in environments:
                        if srv_cloud_environment.get("visibility") == environment:
                            service_type = srv.get("service_type")
                            if service_type:
                                res[service_type] = srv
                            break
        res = dict(
            sorted(
                res.items(),
                key=lambda item: item[1].get("service_type", "").lower()
            )
        )
        return res
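
The two removed methods expose the same cloud_environment/visibility filter in two shapes: a list sorted by service_title versus a dict keyed by service_type with one entry per service. A minimal sketch of how they were called, assuming Services() constructs without arguments (the import path is an assumption):

from otc_metadata.services import Services  # import path assumed

services = Services()

# List form: sorted case-insensitively by service_title.
as_list = services.all_services_by_cloud_environment(
    "eu_de", environments=["public"])

# Dict form: keyed by service_type, keys sorted case-insensitively.
as_dict = services.all_services_by_cloud_environment_as_dict(
    "eu_de", environments=["public"])

print(len(as_list), sorted(as_dict)[:5])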

File diff suppressed because it is too large.