From 65db90e139f6e7d31752fa37f5e46bffa78c2f98 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:04:36 +0200 Subject: [PATCH 01/61] HHT-669-core-implementation --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 955342b..82e7ccd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ jsonpath-ng = "^1.6.1" httpx = "^0.27.0" pydantic-settings = "^2.2.1" pytest-vcr = "^1.0.2" +influxdb-client = "^1.43.0" [tool.poetry.group.dev.dependencies] black = "^23.12" From 46c9355751669508a44a6d71d0836ca46284952c Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:05:07 +0200 Subject: [PATCH 02/61] HHT-669: influxdb client sketch --- app/clients/influxdb/__init__.py | 0 app/clients/influxdb/influxdb_client.py | 6 +++++ .../influxdb/influxdb_configuration.py | 23 +++++++++++++++++++ 3 files changed, 29 insertions(+) create mode 100644 app/clients/influxdb/__init__.py create mode 100644 app/clients/influxdb/influxdb_client.py create mode 100644 app/clients/influxdb/influxdb_configuration.py diff --git a/app/clients/influxdb/__init__.py b/app/clients/influxdb/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/clients/influxdb/influxdb_client.py b/app/clients/influxdb/influxdb_client.py new file mode 100644 index 0000000..01233be --- /dev/null +++ b/app/clients/influxdb/influxdb_client.py @@ -0,0 +1,6 @@ +from typing import Any, List + + +class InfluxDBClient: + async def query(self, query: str) -> List[Any]: + raise NotImplementedError diff --git a/app/clients/influxdb/influxdb_configuration.py b/app/clients/influxdb/influxdb_configuration.py new file mode 100644 index 0000000..4186023 --- /dev/null +++ b/app/clients/influxdb/influxdb_configuration.py @@ -0,0 +1,23 @@ +from typing import List + +from dataclasses import dataclass + + +@dataclass +class QueryConfig: + query: str + measurement_name: str + source: str + property: str + unit: str + is_aggregated: bool + + +@dataclass +class InfluxDBConfiguration: + url: str + token: str + org: str + pod_queries: List[QueryConfig] + node_queries: List[QueryConfig] + workload_queries: List[QueryConfig] From 9e9258593597e5a42ce2c6f55d5eecf456a5bbf5 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:05:24 +0200 Subject: [PATCH 03/61] HHT-669: k8s client sketch --- app/clients/k8s/__init__.py | 0 app/clients/k8s/k8s_client.py | 24 ++++++++++++++++++++++++ 2 files changed, 24 insertions(+) create mode 100644 app/clients/k8s/__init__.py create mode 100644 app/clients/k8s/k8s_client.py diff --git a/app/clients/k8s/__init__.py b/app/clients/k8s/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py new file mode 100644 index 0000000..b4f3f1f --- /dev/null +++ b/app/clients/k8s/k8s_client.py @@ -0,0 +1,24 @@ +from typing import Any, Dict, List + + +class K8SClient: + async def get_nodes(self) -> List[Dict[str, Any]]: + raise NotImplementedError + + async def get_pods(self) -> List[Dict[str, Any]]: + raise NotImplementedError + + async def get_replicasets(self) -> List[Dict[str, Any]]: + raise NotImplementedError + + async def get_deployments(self) -> List[Dict[str, Any]]: + raise NotImplementedError + + async def get_daemonsets(self) -> List[Dict[str, Any]]: + raise NotImplementedError + + async def get_statefullsets(self) -> List[Dict[str, Any]]: + raise NotImplementedError + + async def get_jobs(self) -> List[Dict[str, Any]]: + raise NotImplementedError 
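A minimal usage sketch of the async client interface introduced in PATCH 03: a hypothetical FakeK8SClient subclass returning canned payloads, driven with asyncio.run. It assumes the app package from these patches is importable; the subclass name and the payload dictionaries are illustrative only, standing in for the real kube-backed implementation that arrives later in the series (K8SClientImpl, PATCH 12).

    import asyncio
    from typing import Any, Dict, List

    from app.clients.k8s.k8s_client import K8SClient


    class FakeK8SClient(K8SClient):
        # Hypothetical stand-in: returns canned data instead of calling a cluster.
        async def get_nodes(self) -> List[Dict[str, Any]]:
            return [{"metadata": {"name": "node-1"}}]

        async def get_pods(self) -> List[Dict[str, Any]]:
            return [{"metadata": {"name": "pod-1"}, "spec": {"nodeName": "node-1"}}]


    async def main() -> None:
        client: K8SClient = FakeK8SClient()
        nodes = await client.get_nodes()
        pods = await client.get_pods()
        print(f"{len(nodes)} node(s), {len(pods)} pod(s)")


    if __name__ == "__main__":
        asyncio.run(main())

The base class raises NotImplementedError for every resource getter, so a caller only needs to override the methods it actually exercises; the unimplemented ones fail loudly if reached.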
From 1fbd10fca5539b8d0a04bf245c1ded166da447b4 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:05:48 +0200 Subject: [PATCH 04/61] HHT-669: metadata service client sketch --- app/clients/metadata/__init__.py | 0 app/clients/metadata/metadata_service_client.py | 11 +++++++++++ app/clients/metadata/model.py | 2 ++ 3 files changed, 13 insertions(+) create mode 100644 app/clients/metadata/__init__.py create mode 100644 app/clients/metadata/metadata_service_client.py create mode 100644 app/clients/metadata/model.py diff --git a/app/clients/metadata/__init__.py b/app/clients/metadata/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/clients/metadata/metadata_service_client.py b/app/clients/metadata/metadata_service_client.py new file mode 100644 index 0000000..d367035 --- /dev/null +++ b/app/clients/metadata/metadata_service_client.py @@ -0,0 +1,11 @@ +from typing import List + +from app.clients.metadata.model import Triple + + +class MetadataServiceClient: + async def query(self, host_and_port: str, sparql: str) -> List[Triple]: + raise NotImplementedError + + async def insert(self, host_and_port: str, graph: str) -> None: + raise NotImplementedError diff --git a/app/clients/metadata/model.py b/app/clients/metadata/model.py new file mode 100644 index 0000000..d491160 --- /dev/null +++ b/app/clients/metadata/model.py @@ -0,0 +1,2 @@ +class Triple: + pass From 66662f5e9853208da2bafaf70e4d9d77ce6d4a26 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:06:39 +0200 Subject: [PATCH 05/61] HHT-669: core sketch --- app/core/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 app/core/__init__.py diff --git a/app/core/__init__.py b/app/core/__init__.py new file mode 100644 index 0000000..e69de29 From b8c8383729b707b95e5081a42298d954d603b62a Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:06:55 +0200 Subject: [PATCH 06/61] HHT-669: dkg slice store sketch --- app/core/dkg_slice_store.py | 38 +++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 app/core/dkg_slice_store.py diff --git a/app/core/dkg_slice_store.py b/app/core/dkg_slice_store.py new file mode 100644 index 0000000..9c5110b --- /dev/null +++ b/app/core/dkg_slice_store.py @@ -0,0 +1,38 @@ +from typing import Dict, List, Set + +from dataclasses import dataclass + +from app.kg.graph import Graph + + +@dataclass +class KGSliceId: + node_ip: str + port: int + + +@dataclass +class DKGSlice: + slice_id: KGSliceId + graph: Graph + timestamp: int + + +class DKGSliceStore: + slices: Dict[KGSliceId, DKGSlice] + updates: Set[KGSliceId] + + def __init__(self): + self.slices = {} + self.updates = set() + + def update(self, slice: DKGSlice) -> None: + existing = self.slices.get(slice.slice_id) + if existing != slice: + self.slices[slice.slice_id] = slice + self.updates.add(slice.slice_id) + + def drain_updates(self) -> List[DKGSlice]: + to_consume = self.updates + self.updates = set() + return [self.slices[slice_id] for slice_id in to_consume] From 4f0ae81f0b4fbc250bc511442c2dbe469b08f7db Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:08:25 +0200 Subject: [PATCH 07/61] HHT-669: sketch of repositories --- app/core/influxdb_repository.py | 33 +++++++++++++++++++++++++++++++++ app/core/k8s_repository.py | 26 ++++++++++++++++++++++++++ app/core/kg_repository.py | 23 +++++++++++++++++++++++ 3 files changed, 82 insertions(+) create mode 100644 app/core/influxdb_repository.py create mode 100644 
app/core/k8s_repository.py create mode 100644 app/core/kg_repository.py diff --git a/app/core/influxdb_repository.py b/app/core/influxdb_repository.py new file mode 100644 index 0000000..a68533e --- /dev/null +++ b/app/core/influxdb_repository.py @@ -0,0 +1,33 @@ +from typing import Set + +from dataclasses import dataclass, field + +from app.clients.influxdb.influxdb_client import InfluxDBClient + + +@dataclass +class Metric: + measurement_name: str + metric_name: str + value: float + source: str + + +@dataclass +class MetricsSnapshot: + pod_metrics: Set[Metric] = field(default_factory=set) + node_metrics: Set[Metric] = field(default_factory=set) + deployment_metrics: Set[Metric] = field(default_factory=set) + + +class InfluxDBRepository: + client: InfluxDBClient + + def __init__(self, client: InfluxDBClient): + self.client = client + + async def fetch(self) -> MetricsSnapshot: + # query_api = self.client.query_api() + # query = "" + # result = await query_api.query(query) + return MetricsSnapshot() diff --git a/app/core/k8s_repository.py b/app/core/k8s_repository.py new file mode 100644 index 0000000..4309a50 --- /dev/null +++ b/app/core/k8s_repository.py @@ -0,0 +1,26 @@ +from typing import List + +from dataclasses import dataclass, field + +from app.clients.k8s.k8s_client import K8SClient + + +@dataclass +class ClusterSnapshot: + pods: List[str] = field(default_factory=list) + nodes: List[str] = field(default_factory=list) + deployments: List[str] = field(default_factory=list) + jobs: List[str] = field(default_factory=list) + statefullsets: List[str] = field(default_factory=list) + daemonsets: List[str] = field(default_factory=list) + replicasets: List[str] = field(default_factory=list) + + +class K8SRepository: + k8s_client: K8SClient + + def __init__(self, k8s_client: K8SClient): + self.k8s_client = k8s_client + + async def fetch_snapshot(self) -> ClusterSnapshot: + return ClusterSnapshot() diff --git a/app/core/kg_repository.py b/app/core/kg_repository.py new file mode 100644 index 0000000..86ca5bc --- /dev/null +++ b/app/core/kg_repository.py @@ -0,0 +1,23 @@ +from dataclasses import dataclass + +from app.clients.metadata.metadata_service_client import MetadataServiceClient +from app.kg.graph import Graph +from app.kg.inmemory_graph import InMemoryGraph + + +@dataclass +class ExistingResources: + pass + + +class KGRepository: + metadata_client: MetadataServiceClient + + def __init__(self, metadata_client: MetadataServiceClient): + self.metadata_client = metadata_client + + async def query(self, query: str) -> Graph: + return InMemoryGraph() + + async def update(self, graph: Graph) -> None: + pass From 32f6189b1df53c32887dfd0a6101971cc3eed187 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:08:43 +0200 Subject: [PATCH 08/61] HHT-669: core components --- app/core/kg_builder.py | 26 ++++++++++++++++++++++++++ app/core/kg_updater.py | 14 ++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 app/core/kg_builder.py create mode 100644 app/core/kg_updater.py diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py new file mode 100644 index 0000000..4905e93 --- /dev/null +++ b/app/core/kg_builder.py @@ -0,0 +1,26 @@ +from app.pipeline.dkg_slice_store import DKGSliceStore +from app.pipeline.source.influxdb_repository import InfluxDBRepository +from app.pipeline.source.k8s_repository import K8SRepository +from app.pipeline.source.kg_repository import KGRepository + + +class KGBuilder: + dkg_slice_store: DKGSliceStore + k8s_repository: K8SRepository + 
kg_repository: KGRepository + influxdb_repository: InfluxDBRepository + + def __init__( + self, + dkg_slice_store: DKGSliceStore, + k8s_repository: K8SRepository, + kg_repository: KGRepository, + influxdb_repository: InfluxDBRepository, + ): + self.dkg_slice_store = dkg_slice_store + self.k8s_repository = k8s_repository + self.kg_repository = kg_repository + self.influxdb_repository = influxdb_repository + + async def run(self) -> None: + pass diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py new file mode 100644 index 0000000..30bf3ad --- /dev/null +++ b/app/core/kg_updater.py @@ -0,0 +1,14 @@ +from app.pipeline.dkg_slice_store import DKGSliceStore +from app.pipeline.source.kg_repository import KGRepository + + +class KGUpdater: + dkg_slice_store: DKGSliceStore + kg_repository: KGRepository + + def __init__(self, dkg_slice_store: DKGSliceStore, kg_repository: KGRepository): + self.dkg_slice_store = dkg_slice_store + self.kg_repository = kg_repository + + async def run(self) -> None: + pass From eb6a4e2b45454bc750b89d1d1649efc257207eab Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:08:59 +0200 Subject: [PATCH 09/61] HHT-669: context sketch --- app/context.py | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 app/context.py diff --git a/app/context.py b/app/context.py new file mode 100644 index 0000000..704d10b --- /dev/null +++ b/app/context.py @@ -0,0 +1,40 @@ +import asyncio + +from app.clients.influxdb.influxdb_client import InfluxDBClient +from app.clients.k8s.k8s_client import K8SClient +from app.clients.metadata.metadata_service_client import MetadataServiceClient +from app.pipeline.dkg_slice_store import DKGSliceStore +from app.pipeline.kg_builder import KGBuilder +from app.pipeline.kg_updater import KGUpdater +from app.pipeline.source.influxdb_repository import InfluxDBRepository +from app.pipeline.source.k8s_repository import K8SRepository +from app.pipeline.source.kg_repository import KGRepository + + +class KGExporterContext: + builder: KGBuilder + updater: KGUpdater + runner: asyncio.Runner + dkg_slice_store: DKGSliceStore + + def __init__( + self, + metadata_client: MetadataServiceClient, + k8s_client: K8SClient, + influxdb_client: InfluxDBClient, + ): + kg_repository = KGRepository(metadata_client) + k8s_repository = K8SRepository(k8s_client) + influxdb_repository = InfluxDBRepository(influxdb_client) + self.dkg_slice_store = DKGSliceStore() + self.builder = KGBuilder( + self.dkg_slice_store, k8s_repository, kg_repository, influxdb_repository + ) + self.updater = KGUpdater(self.dkg_slice_store, kg_repository) + self.runner = asyncio.Runner() + + def start(self) -> None: + pass + + def stop(self) -> None: + pass From e06914662fc2fe0b08c4592da4f8749d8b98afeb Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 29 May 2024 14:56:45 +0200 Subject: [PATCH 10/61] HHT-669: mock k8s client --- app/clients/k8s/k8s_client.py | 31 +++++++++++++ app/clients/k8s/mock_k8s_client.py | 71 ++++++++++++++++++++++++++++++ app/clients/k8s/test_k8s_client.py | 6 +++ app/context.py | 12 ++--- app/core/k8s_repository.py | 22 ++++----- app/core/kg_builder.py | 8 ++-- app/core/kg_updater.py | 4 +- 7 files changed, 131 insertions(+), 23 deletions(-) create mode 100644 app/clients/k8s/mock_k8s_client.py create mode 100644 app/clients/k8s/test_k8s_client.py diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py index b4f3f1f..4b00d1a 100644 --- a/app/clients/k8s/k8s_client.py +++ 
b/app/clients/k8s/k8s_client.py @@ -1,7 +1,38 @@ from typing import Any, Dict, List +from dataclasses import dataclass, field + + +@dataclass +class ResourceSnapshot: + pods: List[Dict[str, Any]] = field(default_factory=list) + nodes: List[Dict[str, Any]] = field(default_factory=list) + deployments: List[Dict[str, Any]] = field(default_factory=list) + jobs: List[Dict[str, Any]] = field(default_factory=list) + statefullsets: List[Dict[str, Any]] = field(default_factory=list) + daemonsets: List[Dict[str, Any]] = field(default_factory=list) + replicasets: List[Dict[str, Any]] = field(default_factory=list) + class K8SClient: + async def fetch_snapshot(self) -> ResourceSnapshot: + nodes = await self.get_nodes() + pods = await self.get_pods() + deployments = await self.get_deployments() + jobs = await self.get_jobs() + statefullsets = await self.get_statefullsets() + daemonsets = await self.get_daemonsets() + replicasets = await self.get_replicasets() + return ResourceSnapshot( + pods=pods, + nodes=nodes, + deployments=deployments, + jobs=jobs, + statefullsets=statefullsets, + daemonsets=daemonsets, + replicasets=replicasets, + ) + async def get_nodes(self) -> List[Dict[str, Any]]: raise NotImplementedError diff --git a/app/clients/k8s/mock_k8s_client.py b/app/clients/k8s/mock_k8s_client.py new file mode 100644 index 0000000..fe7cec4 --- /dev/null +++ b/app/clients/k8s/mock_k8s_client.py @@ -0,0 +1,71 @@ +from typing import Any, Dict, List, override + +from app.clients.k8s.k8s_client import K8SClient + + +class MockK8SClient(K8SClient): + nodes: List[Dict[str, Any]] + pods: List[Dict[str, Any]] + replicasets: List[Dict[str, Any]] + deployments: List[Dict[str, Any]] + daemonsets: List[Dict[str, Any]] + statefullsets: List[Dict[str, Any]] + jobs: List[Dict[str, Any]] + + def __init__(self): + self.nodes = [] + self.pods = [] + self.replicasets = [] + self.deployments = [] + self.daemonsets = [] + self.statefullsets = [] + self.jobs = [] + + def mock_nodes(self, nodes: List[Dict[str, Any]]) -> None: + self.nodes = nodes + + @override + async def get_nodes(self) -> List[Dict[str, Any]]: + return self.nodes + + def mock_pods(self, pods: List[Dict[str, Any]]) -> None: + self.pods = pods + + @override + async def get_pods(self) -> List[Dict[str, Any]]: + return self.pods + + def mock_replicasets(self, replicasets: List[Dict[str, Any]]) -> None: + self.replicasets = replicasets + + @override + async def get_replicasets(self) -> List[Dict[str, Any]]: + return self.replicasets + + def mock_deployments(self, deployments: List[Dict[str, Any]]) -> None: + self.deployments = deployments + + @override + async def get_deployments(self) -> List[Dict[str, Any]]: + return self.deployments + + def mock_daemonsets(self, daemonsets: List[Dict[str, Any]]) -> None: + self.daemonsets = daemonsets + + @override + async def get_daemonsets(self) -> List[Dict[str, Any]]: + return self.daemonsets + + def mock_statefullsets(self, statefullsets: List[Dict[str, Any]]) -> None: + self.statefullsets = statefullsets + + @override + async def get_statefullsets(self) -> List[Dict[str, Any]]: + return self.statefullsets + + def mock_jobs(self, jobs: List[Dict[str, Any]]) -> None: + self.jobs = jobs + + @override + async def get_jobs(self) -> List[Dict[str, Any]]: + return self.jobs diff --git a/app/clients/k8s/test_k8s_client.py b/app/clients/k8s/test_k8s_client.py new file mode 100644 index 0000000..2beb026 --- /dev/null +++ b/app/clients/k8s/test_k8s_client.py @@ -0,0 +1,6 @@ +from unittest import TestCase + + +class 
K8SRepositoryTest(TestCase): + def test_get_resource_snapshot(self): + pass diff --git a/app/context.py b/app/context.py index 704d10b..026c737 100644 --- a/app/context.py +++ b/app/context.py @@ -3,12 +3,12 @@ from app.clients.influxdb.influxdb_client import InfluxDBClient from app.clients.k8s.k8s_client import K8SClient from app.clients.metadata.metadata_service_client import MetadataServiceClient -from app.pipeline.dkg_slice_store import DKGSliceStore -from app.pipeline.kg_builder import KGBuilder -from app.pipeline.kg_updater import KGUpdater -from app.pipeline.source.influxdb_repository import InfluxDBRepository -from app.pipeline.source.k8s_repository import K8SRepository -from app.pipeline.source.kg_repository import KGRepository +from app.core.dkg_slice_store import DKGSliceStore +from app.core.influxdb_repository import InfluxDBRepository +from app.core.k8s_repository import K8SRepository +from app.core.kg_builder import KGBuilder +from app.core.kg_repository import KGRepository +from app.core.kg_updater import KGUpdater class KGExporterContext: diff --git a/app/core/k8s_repository.py b/app/core/k8s_repository.py index 4309a50..ede3f4a 100644 --- a/app/core/k8s_repository.py +++ b/app/core/k8s_repository.py @@ -1,4 +1,4 @@ -from typing import List +from typing import Any, Dict, List from dataclasses import dataclass, field @@ -6,14 +6,14 @@ @dataclass -class ClusterSnapshot: - pods: List[str] = field(default_factory=list) - nodes: List[str] = field(default_factory=list) - deployments: List[str] = field(default_factory=list) - jobs: List[str] = field(default_factory=list) - statefullsets: List[str] = field(default_factory=list) - daemonsets: List[str] = field(default_factory=list) - replicasets: List[str] = field(default_factory=list) +class ResourceSnapshot: + pods: List[Dict[str, Any]] = field(default_factory=list) + nodes: List[Dict[str, Any]] = field(default_factory=list) + deployments: List[Dict[str, Any]] = field(default_factory=list) + jobs: List[Dict[str, Any]] = field(default_factory=list) + statefullsets: List[Dict[str, Any]] = field(default_factory=list) + daemonsets: List[Dict[str, Any]] = field(default_factory=list) + replicasets: List[Dict[str, Any]] = field(default_factory=list) class K8SRepository: @@ -22,5 +22,5 @@ class K8SRepository: def __init__(self, k8s_client: K8SClient): self.k8s_client = k8s_client - async def fetch_snapshot(self) -> ClusterSnapshot: - return ClusterSnapshot() + async def fetch_snapshot(self) -> ResourceSnapshot: + return ResourceSnapshot() diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 4905e93..a37678e 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -1,7 +1,7 @@ -from app.pipeline.dkg_slice_store import DKGSliceStore -from app.pipeline.source.influxdb_repository import InfluxDBRepository -from app.pipeline.source.k8s_repository import K8SRepository -from app.pipeline.source.kg_repository import KGRepository +from app.core.dkg_slice_store import DKGSliceStore +from app.core.influxdb_repository import InfluxDBRepository +from app.core.k8s_repository import K8SRepository +from app.core.kg_repository import KGRepository class KGBuilder: diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index 30bf3ad..67e2ba9 100644 --- a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -1,5 +1,5 @@ -from app.pipeline.dkg_slice_store import DKGSliceStore -from app.pipeline.source.kg_repository import KGRepository +from app.core.dkg_slice_store import DKGSliceStore +from app.core.kg_repository import 
KGRepository class KGUpdater: From 04c359362d7ecdc06fce9cc77b14304d7368087d Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 30 May 2024 12:58:39 +0200 Subject: [PATCH 11/61] HHT-669: mock k8s client --- app/clients/k8s/k8s_client.py | 26 +++++++++++++----- app/clients/k8s/mock_k8s_client.py | 9 +------ app/clients/k8s/test_k8s_client.py | 42 ++++++++++++++++++++++++++++-- app/core/k8s_repository.py | 26 ------------------ app/core/kg_builder.py | 8 +++--- 5 files changed, 64 insertions(+), 47 deletions(-) delete mode 100644 app/core/k8s_repository.py diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py index 4b00d1a..b83834e 100644 --- a/app/clients/k8s/k8s_client.py +++ b/app/clients/k8s/k8s_client.py @@ -1,5 +1,6 @@ from typing import Any, Dict, List +import asyncio from dataclasses import dataclass, field @@ -16,13 +17,24 @@ class ResourceSnapshot: class K8SClient: async def fetch_snapshot(self) -> ResourceSnapshot: - nodes = await self.get_nodes() - pods = await self.get_pods() - deployments = await self.get_deployments() - jobs = await self.get_jobs() - statefullsets = await self.get_statefullsets() - daemonsets = await self.get_daemonsets() - replicasets = await self.get_replicasets() + result = await asyncio.gather( + self.get_nodes(), + self.get_pods(), + self.get_deployments(), + self.get_jobs(), + self.get_statefullsets(), + self.get_daemonsets(), + self.get_replicasets(), + ) + ( + nodes, + pods, + deployments, + jobs, + statefullsets, + daemonsets, + replicasets, + ) = result return ResourceSnapshot( pods=pods, nodes=nodes, diff --git a/app/clients/k8s/mock_k8s_client.py b/app/clients/k8s/mock_k8s_client.py index fe7cec4..44c4d80 100644 --- a/app/clients/k8s/mock_k8s_client.py +++ b/app/clients/k8s/mock_k8s_client.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, override +from typing import Any, Dict, List from app.clients.k8s.k8s_client import K8SClient @@ -24,48 +24,41 @@ def __init__(self): def mock_nodes(self, nodes: List[Dict[str, Any]]) -> None: self.nodes = nodes - @override async def get_nodes(self) -> List[Dict[str, Any]]: return self.nodes def mock_pods(self, pods: List[Dict[str, Any]]) -> None: self.pods = pods - @override async def get_pods(self) -> List[Dict[str, Any]]: return self.pods def mock_replicasets(self, replicasets: List[Dict[str, Any]]) -> None: self.replicasets = replicasets - @override async def get_replicasets(self) -> List[Dict[str, Any]]: return self.replicasets def mock_deployments(self, deployments: List[Dict[str, Any]]) -> None: self.deployments = deployments - @override async def get_deployments(self) -> List[Dict[str, Any]]: return self.deployments def mock_daemonsets(self, daemonsets: List[Dict[str, Any]]) -> None: self.daemonsets = daemonsets - @override async def get_daemonsets(self) -> List[Dict[str, Any]]: return self.daemonsets def mock_statefullsets(self, statefullsets: List[Dict[str, Any]]) -> None: self.statefullsets = statefullsets - @override async def get_statefullsets(self) -> List[Dict[str, Any]]: return self.statefullsets def mock_jobs(self, jobs: List[Dict[str, Any]]) -> None: self.jobs = jobs - @override async def get_jobs(self) -> List[Dict[str, Any]]: return self.jobs diff --git a/app/clients/k8s/test_k8s_client.py b/app/clients/k8s/test_k8s_client.py index 2beb026..ccb9fea 100644 --- a/app/clients/k8s/test_k8s_client.py +++ b/app/clients/k8s/test_k8s_client.py @@ -1,6 +1,44 @@ +from typing import Any, Dict, List + +import asyncio +import dataclasses from unittest import TestCase +from 
app.clients.k8s.k8s_client import ResourceSnapshot +from app.clients.k8s.mock_k8s_client import MockK8SClient + + +@dataclasses.dataclass +class TestClass: + value: List[Dict[str, Any]] + value2: List[Dict[str, Any]] + + +class K8SClientTest(TestCase): + loop: asyncio.AbstractEventLoop + + def setUp(self): + self.loop = asyncio.get_event_loop() + self.maxDiff = None -class K8SRepositoryTest(TestCase): def test_get_resource_snapshot(self): - pass + client = MockK8SClient() + client.mock_daemonsets([{"daemonsets": "fake"}]) + client.mock_deployments([{"deployments": "fake"}]) + client.mock_jobs([{"jobs": "fake"}]) + client.mock_nodes([{"nodes": "fake"}]) + client.mock_pods([{"pods": "fake"}]) + client.mock_replicasets([{"replicasets": "fake"}]) + client.mock_statefullsets([{"statefullsets": "fake"}]) + actual = asyncio.run(client.fetch_snapshot()) + + expected = ResourceSnapshot( + pods=[{"pods": "fake"}], + nodes=[{"nodes": "fake"}], + deployments=[{"deployments": "fake"}], + jobs=[{"jobs": "fake"}], + statefullsets=[{"statefullsets": "fake"}], + daemonsets=[{"daemonsets": "fake"}], + replicasets=[{"replicasets": "fake"}], + ) + self.assertEqual(expected, actual) diff --git a/app/core/k8s_repository.py b/app/core/k8s_repository.py deleted file mode 100644 index ede3f4a..0000000 --- a/app/core/k8s_repository.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Any, Dict, List - -from dataclasses import dataclass, field - -from app.clients.k8s.k8s_client import K8SClient - - -@dataclass -class ResourceSnapshot: - pods: List[Dict[str, Any]] = field(default_factory=list) - nodes: List[Dict[str, Any]] = field(default_factory=list) - deployments: List[Dict[str, Any]] = field(default_factory=list) - jobs: List[Dict[str, Any]] = field(default_factory=list) - statefullsets: List[Dict[str, Any]] = field(default_factory=list) - daemonsets: List[Dict[str, Any]] = field(default_factory=list) - replicasets: List[Dict[str, Any]] = field(default_factory=list) - - -class K8SRepository: - k8s_client: K8SClient - - def __init__(self, k8s_client: K8SClient): - self.k8s_client = k8s_client - - async def fetch_snapshot(self) -> ResourceSnapshot: - return ResourceSnapshot() diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index a37678e..cac77a1 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -1,24 +1,24 @@ +from app.clients.k8s.k8s_client import K8SClient from app.core.dkg_slice_store import DKGSliceStore from app.core.influxdb_repository import InfluxDBRepository -from app.core.k8s_repository import K8SRepository from app.core.kg_repository import KGRepository class KGBuilder: dkg_slice_store: DKGSliceStore - k8s_repository: K8SRepository + k8s_client: K8SClient kg_repository: KGRepository influxdb_repository: InfluxDBRepository def __init__( self, dkg_slice_store: DKGSliceStore, - k8s_repository: K8SRepository, + k8s_client: K8SClient, kg_repository: KGRepository, influxdb_repository: InfluxDBRepository, ): self.dkg_slice_store = dkg_slice_store - self.k8s_repository = k8s_repository + self.k8s_client = k8s_client self.kg_repository = kg_repository self.influxdb_repository = influxdb_repository From 64d87d4915d15b0f2f7d76abcbece221c2923946 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 30 May 2024 13:58:46 +0200 Subject: [PATCH 12/61] HHT-669: influx db client --- app/clients/influxdb/influxdb_client.py | 9 +++- app/clients/influxdb/influxdb_client_impl.py | 40 ++++++++++++++++ .../influxdb/influxdb_configuration.py | 23 ---------- 
app/clients/influxdb/influxdb_settings.py | 9 ++++ app/clients/influxdb/metric_value.py | 12 +++++ app/clients/influxdb/mock_infuxdbclient.py | 18 ++++++++ app/clients/influxdb/query_result_parser.py | 28 +++++++++++ app/clients/influxdb/simple_result_parser.py | 17 +++++++ app/clients/influxdb/test_influxdb_client.py | 13 ++++++ .../influxdb/test_simple_result_parser.py | 42 +++++++++++++++++ app/clients/k8s/k8s_client_impl.py | 46 +++++++++++++++++++ app/clients/k8s/k8s_settings.py | 6 +++ app/clients/metadata/test_metadata_service.py | 9 ++++ app/core/influxdb_repository.py | 24 +++++++++- pyproject.toml | 1 + 15 files changed, 270 insertions(+), 27 deletions(-) create mode 100644 app/clients/influxdb/influxdb_client_impl.py delete mode 100644 app/clients/influxdb/influxdb_configuration.py create mode 100644 app/clients/influxdb/influxdb_settings.py create mode 100644 app/clients/influxdb/metric_value.py create mode 100644 app/clients/influxdb/mock_infuxdbclient.py create mode 100644 app/clients/influxdb/query_result_parser.py create mode 100644 app/clients/influxdb/simple_result_parser.py create mode 100644 app/clients/influxdb/test_influxdb_client.py create mode 100644 app/clients/influxdb/test_simple_result_parser.py create mode 100644 app/clients/k8s/k8s_client_impl.py create mode 100644 app/clients/k8s/k8s_settings.py create mode 100644 app/clients/metadata/test_metadata_service.py diff --git a/app/clients/influxdb/influxdb_client.py b/app/clients/influxdb/influxdb_client.py index 01233be..a6f90d3 100644 --- a/app/clients/influxdb/influxdb_client.py +++ b/app/clients/influxdb/influxdb_client.py @@ -1,6 +1,11 @@ -from typing import Any, List +from typing import List + +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.query_result_parser import QueryResultParser class InfluxDBClient: - async def query(self, query: str) -> List[Any]: + async def query( + self, query: str, result_parser: QueryResultParser + ) -> List[MetricValue]: raise NotImplementedError diff --git a/app/clients/influxdb/influxdb_client_impl.py b/app/clients/influxdb/influxdb_client_impl.py new file mode 100644 index 0000000..8f2c329 --- /dev/null +++ b/app/clients/influxdb/influxdb_client_impl.py @@ -0,0 +1,40 @@ +from typing import List + +from influxdb_client.client.flux_table import TableList +from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync + +from app.clients.influxdb.influxdb_client import InfluxDBClient +from app.clients.influxdb.influxdb_settings import InfluxDBSettings +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.query_result_parser import QueryResultParser + + +class InfluxDBClientImpl(InfluxDBClient): + settings: InfluxDBSettings + async_client: InfluxDBClientAsync + + def __init__(self, settings: InfluxDBSettings): + self.settings = settings + + async def query( + self, query: str, result_parser: QueryResultParser + ) -> List[MetricValue]: + async with InfluxDBClientAsync( + url=self.settings.url, + token=self.settings.token, + org=self.settings.org, + timeout=self.settings.timeout, + ) as async_client: + flux_result = await async_client.query_api().query(query) + return self.parse_response(flux_result, result_parser) + + def parse_response( + self, + flux_result: TableList, + result_parser: QueryResultParser, + ) -> List[MetricValue]: + query_results = [] + for table in flux_result: + for record in table.records: + query_results.append(result_parser.parse(record)) + return query_results diff --git 
a/app/clients/influxdb/influxdb_configuration.py b/app/clients/influxdb/influxdb_configuration.py deleted file mode 100644 index 4186023..0000000 --- a/app/clients/influxdb/influxdb_configuration.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import List - -from dataclasses import dataclass - - -@dataclass -class QueryConfig: - query: str - measurement_name: str - source: str - property: str - unit: str - is_aggregated: bool - - -@dataclass -class InfluxDBConfiguration: - url: str - token: str - org: str - pod_queries: List[QueryConfig] - node_queries: List[QueryConfig] - workload_queries: List[QueryConfig] diff --git a/app/clients/influxdb/influxdb_settings.py b/app/clients/influxdb/influxdb_settings.py new file mode 100644 index 0000000..065c072 --- /dev/null +++ b/app/clients/influxdb/influxdb_settings.py @@ -0,0 +1,9 @@ +from dataclasses import dataclass + + +@dataclass +class InfluxDBSettings: + url: str + token: str + org: str + timeout: int diff --git a/app/clients/influxdb/metric_value.py b/app/clients/influxdb/metric_value.py new file mode 100644 index 0000000..bc27751 --- /dev/null +++ b/app/clients/influxdb/metric_value.py @@ -0,0 +1,12 @@ +from typing import TypeAlias + +from dataclasses import dataclass + +MetricId: TypeAlias = str + + +@dataclass +class MetricValue: + metric_id: MetricId + timestamp: int + value: float diff --git a/app/clients/influxdb/mock_infuxdbclient.py b/app/clients/influxdb/mock_infuxdbclient.py new file mode 100644 index 0000000..d9a087e --- /dev/null +++ b/app/clients/influxdb/mock_infuxdbclient.py @@ -0,0 +1,18 @@ +from typing import Dict, List + +from app.clients.influxdb.influxdb_client import InfluxDBClient +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.query_result_parser import QueryResultParser + + +class MockInfluxDBClient(InfluxDBClient): + results: Dict[str, List[MetricValue]] + + def __init__(self): + self.results = dict() + + def mock_query(self, query: str, client_response: List[MetricValue]) -> None: + self.results[query] = client_response + + async def query(self, query: str, _: QueryResultParser) -> List[MetricValue]: + return self.results.get(query) or [] diff --git a/app/clients/influxdb/query_result_parser.py b/app/clients/influxdb/query_result_parser.py new file mode 100644 index 0000000..e1d8615 --- /dev/null +++ b/app/clients/influxdb/query_result_parser.py @@ -0,0 +1,28 @@ +from typing import Any, Dict + +from datetime import datetime + +from app.clients.influxdb.metric_value import MetricValue + + +class QueryResultParser: + def get_timestamp(self, dt: Any) -> int: + if isinstance(dt, int) or isinstance(dt, float): + return int(dt) + elif isinstance(dt, datetime): + return int(dt.timestamp() * 1000) + else: + raise Exception( + f"Unable to convert value '{dt}' to timestamp. Unknown type {type(dt)}." + ) + + def get_float(self, value: Any) -> float: + if isinstance(value, int) or isinstance(value, float): + return value + else: + raise Exception( + f"Unable to convert value '{value}' to float. Unknown type {type(value)}." 
+ ) + + def parse(self, row: Dict[str, Any]) -> MetricValue: + raise NotImplementedError diff --git a/app/clients/influxdb/simple_result_parser.py b/app/clients/influxdb/simple_result_parser.py new file mode 100644 index 0000000..6b5e6b7 --- /dev/null +++ b/app/clients/influxdb/simple_result_parser.py @@ -0,0 +1,17 @@ +from typing import Any, Dict + +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.query_result_parser import QueryResultParser + + +class SimpleResultParser(QueryResultParser): + IDENTIFIER_FIELD: str = "identifier" + TIMESTAMP_FIELD: str = "timestamp" + VALUE_FIELD: str = "value" + + def parse(self, row: Dict[str, Any]) -> MetricValue: + return MetricValue( + row[self.IDENTIFIER_FIELD], + self.get_timestamp(row[self.TIMESTAMP_FIELD]), + self.get_float(row[self.VALUE_FIELD]), + ) diff --git a/app/clients/influxdb/test_influxdb_client.py b/app/clients/influxdb/test_influxdb_client.py new file mode 100644 index 0000000..6f2e761 --- /dev/null +++ b/app/clients/influxdb/test_influxdb_client.py @@ -0,0 +1,13 @@ +import asyncio +from unittest import TestCase + + +class InfluxDBClientTest(TestCase): + loop: asyncio.AbstractEventLoop + + def setUp(self): + self.loop = asyncio.get_event_loop() + self.maxDiff = None + + def test_integration(self): + pass diff --git a/app/clients/influxdb/test_simple_result_parser.py b/app/clients/influxdb/test_simple_result_parser.py new file mode 100644 index 0000000..718919b --- /dev/null +++ b/app/clients/influxdb/test_simple_result_parser.py @@ -0,0 +1,42 @@ +import datetime +from unittest import TestCase + +from dateutil.tz import tzutc + +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.simple_result_parser import SimpleResultParser + + +class SimpleResultParserTest(TestCase): + def test_parse(self) -> None: + parser = SimpleResultParser() + + row = { + "result": "_result", + "table": 0, + "_start": datetime.datetime(2024, 5, 31, 7, 53, 2, 452746, tzinfo=tzutc()), + "_stop": datetime.datetime(2024, 5, 31, 8, 13, 2, 452746, tzinfo=tzutc()), + "timestamp": datetime.datetime(2024, 5, 31, 8, 0, tzinfo=tzutc()), + "value": 26237685760.0, + "_field": "node_memory_MemFree_bytes", + "_measurement": "prometheus_remote_write", + "app_kubernetes_io_component": "metrics", + "app_kubernetes_io_instance": "monitoring-stack", + "app_kubernetes_io_managed_by": "Helm", + "app_kubernetes_io_name": "prometheus-node-exporter", + "app_kubernetes_io_part_of": "prometheus-node-exporter", + "app_kubernetes_io_version": "1.7.0", + "helm_sh_chart": "prometheus-node-exporter-4.25.0", + "host": "telegraf-polling-service", + "instance": "10.14.1.160:9100", + "job": "kubernetes-service-endpoints", + "namespace": "monitoring", + "identifier": "glaciation-testm1w5-master01", + "service": "monitoring-stack-prometheus-node-exporter", + } + actual = parser.parse(row) + + self.assertEqual( + MetricValue("glaciation-testm1w5-master01", 1717142400000, 26237685760.0), + actual, + ) diff --git a/app/clients/k8s/k8s_client_impl.py b/app/clients/k8s/k8s_client_impl.py new file mode 100644 index 0000000..51b216f --- /dev/null +++ b/app/clients/k8s/k8s_client_impl.py @@ -0,0 +1,46 @@ +from typing import Any, Dict, List + +from kubernetes import config, dynamic +from kubernetes.client import api_client + +from app.clients.k8s.k8s_client import K8SClient +from app.clients.k8s.k8s_settings import K8SSettings + + +class K8SClientImpl(K8SClient): + client: dynamic.DynamicClient + + def __init__(self, settings: K8SSettings): 
+ configuration = ( + config.load_incluster_config() + if settings.in_cluster + else config.load_kube_config() + ) + self.client = dynamic.DynamicClient( + api_client.ApiClient(configuration=configuration) + ) + + async def get_nodes(self) -> List[Dict[str, Any]]: + return await self.get_resource("Node") + + async def get_pods(self) -> List[Dict[str, Any]]: + return await self.get_resource("Pod") + + async def get_deployments(self) -> List[Dict[str, Any]]: + return await self.get_resource("Deployment") + + async def get_replicasets(self) -> List[Dict[str, Any]]: + return await self.get_resource("Replicaset") + + async def get_daemonsets(self) -> List[Dict[str, Any]]: + return await self.get_resource("Daemonset") + + async def get_statefullsets(self) -> List[Dict[str, Any]]: + return await self.get_resource("StatefulSet") + + async def get_jobs(self) -> List[Dict[str, Any]]: + return await self.get_resource("Job") + + async def get_resource(self, kind: str) -> List[Dict[str, Any]]: + api = self.client.resources.get(api_version="v1", kind=kind) + return [item.to_dict() for item in api.get().items] diff --git a/app/clients/k8s/k8s_settings.py b/app/clients/k8s/k8s_settings.py new file mode 100644 index 0000000..23110d6 --- /dev/null +++ b/app/clients/k8s/k8s_settings.py @@ -0,0 +1,6 @@ +from dataclasses import dataclass + + +@dataclass +class K8SSettings: + in_cluster: bool diff --git a/app/clients/metadata/test_metadata_service.py b/app/clients/metadata/test_metadata_service.py new file mode 100644 index 0000000..3a3abb0 --- /dev/null +++ b/app/clients/metadata/test_metadata_service.py @@ -0,0 +1,9 @@ +from unittest import TestCase + + +class MetadataServiceClientTest(TestCase): + def test_query(self): + pass + + def test_insert(self): + pass diff --git a/app/core/influxdb_repository.py b/app/core/influxdb_repository.py index a68533e..d8fb168 100644 --- a/app/core/influxdb_repository.py +++ b/app/core/influxdb_repository.py @@ -1,15 +1,33 @@ -from typing import Set +from typing import List, Set from dataclasses import dataclass, field from app.clients.influxdb.influxdb_client import InfluxDBClient +@dataclass +class MetricQuery: + measurement_name: str + query: str + result_parser: str + source: str + + +@dataclass +class QueryOptions: + pod_metric_queries: List[MetricQuery] + node_metric_queries: List[MetricQuery] + workload_metric_queries: List[MetricQuery] + + @dataclass class Metric: + identifier: str + kind: str measurement_name: str metric_name: str value: float + timestamp: int source: str @@ -26,7 +44,9 @@ class InfluxDBRepository: def __init__(self, client: InfluxDBClient): self.client = client - async def fetch(self) -> MetricsSnapshot: + async def fetch( + self, timestamp: int, query_options: QueryOptions + ) -> MetricsSnapshot: # query_api = self.client.query_api() # query = "" # result = await query_api.query(query) diff --git a/pyproject.toml b/pyproject.toml index 82e7ccd..4b3a611 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ httpx = "^0.27.0" pydantic-settings = "^2.2.1" pytest-vcr = "^1.0.2" influxdb-client = "^1.43.0" +aiocsv = "1.3.2" [tool.poetry.group.dev.dependencies] black = "^23.12" From d23ff294d497350bd60f4758e5fd55748b1bf0b1 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 31 May 2024 11:24:54 +0200 Subject: [PATCH 13/61] HHT-669: metadata service client --- app/clients/influxdb/test_influxdb_client.py | 13 ----- app/clients/k8s/test_k8s_client.py | 15 ------ ...ServiceClientTest.test_insert_failure.yaml | 36 ++++++++++++++ 
...ServiceClientTest.test_insert_success.yaml | 36 ++++++++++++++ ...aServiceClientTest.test_query_success.yaml | 36 ++++++++++++++ .../metadata/metadata_service_client.py | 11 ++++- .../metadata/metadata_service_client_impl.py | 46 +++++++++++++++++ .../metadata/metadata_service_settings.py | 6 +++ .../metadata/mock_metadata_service_client.py | 49 +++++++++++++++++++ app/clients/metadata/model.py | 2 - app/clients/metadata/test_metadata_service.py | 9 ---- .../test_metadata_service_client_impl.py | 32 ++++++++++++ pyproject.toml | 1 + 13 files changed, 251 insertions(+), 41 deletions(-) delete mode 100644 app/clients/influxdb/test_influxdb_client.py create mode 100644 app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_failure.yaml create mode 100644 app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_success.yaml create mode 100644 app/clients/metadata/cassettes/MetadataServiceClientTest.test_query_success.yaml create mode 100644 app/clients/metadata/metadata_service_client_impl.py create mode 100644 app/clients/metadata/metadata_service_settings.py create mode 100644 app/clients/metadata/mock_metadata_service_client.py delete mode 100644 app/clients/metadata/model.py delete mode 100644 app/clients/metadata/test_metadata_service.py create mode 100644 app/clients/metadata/test_metadata_service_client_impl.py diff --git a/app/clients/influxdb/test_influxdb_client.py b/app/clients/influxdb/test_influxdb_client.py deleted file mode 100644 index 6f2e761..0000000 --- a/app/clients/influxdb/test_influxdb_client.py +++ /dev/null @@ -1,13 +0,0 @@ -import asyncio -from unittest import TestCase - - -class InfluxDBClientTest(TestCase): - loop: asyncio.AbstractEventLoop - - def setUp(self): - self.loop = asyncio.get_event_loop() - self.maxDiff = None - - def test_integration(self): - pass diff --git a/app/clients/k8s/test_k8s_client.py b/app/clients/k8s/test_k8s_client.py index ccb9fea..27aaf98 100644 --- a/app/clients/k8s/test_k8s_client.py +++ b/app/clients/k8s/test_k8s_client.py @@ -1,26 +1,11 @@ -from typing import Any, Dict, List - import asyncio -import dataclasses from unittest import TestCase from app.clients.k8s.k8s_client import ResourceSnapshot from app.clients.k8s.mock_k8s_client import MockK8SClient -@dataclasses.dataclass -class TestClass: - value: List[Dict[str, Any]] - value2: List[Dict[str, Any]] - - class K8SClientTest(TestCase): - loop: asyncio.AbstractEventLoop - - def setUp(self): - self.loop = asyncio.get_event_loop() - self.maxDiff = None - def test_get_resource_snapshot(self): client = MockK8SClient() client.mock_daemonsets([{"daemonsets": "fake"}]) diff --git a/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_failure.yaml b/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_failure.yaml new file mode 100644 index 0000000..3d4caed --- /dev/null +++ b/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_failure.yaml @@ -0,0 +1,36 @@ +interactions: +- request: + body: '{}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '2' + content-type: + - application/json + host: + - metadata-service + user-agent: + - python-httpx/0.27.0 + method: PATCH + uri: http://metadata-service/api/v0/graph + response: + content: '{"detail":"Not Found"}' + headers: + Connection: + - keep-alive + Content-Length: + - '22' + Content-Type: + - application/json + Date: + - Mon, 08 Apr 2024 09:45:47 GMT + Server: + - nginx/1.18.0 (Ubuntu) + 
http_version: HTTP/1.1 + status_code: 404 +version: 1 diff --git a/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_success.yaml b/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_success.yaml new file mode 100644 index 0000000..9c3468d --- /dev/null +++ b/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_success.yaml @@ -0,0 +1,36 @@ +interactions: +- request: + body: '{}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '2' + content-type: + - application/json + host: + - metadata-service + user-agent: + - python-httpx/0.27.0 + method: PATCH + uri: http://metadata-service/api/v0/graph + response: + content: '"Success"' + headers: + Connection: + - keep-alive + Content-Length: + - '9' + Content-Type: + - application/json + Date: + - Mon, 08 Apr 2024 09:45:47 GMT + Server: + - nginx/1.18.0 (Ubuntu) + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/app/clients/metadata/cassettes/MetadataServiceClientTest.test_query_success.yaml b/app/clients/metadata/cassettes/MetadataServiceClientTest.test_query_success.yaml new file mode 100644 index 0000000..e9c3950 --- /dev/null +++ b/app/clients/metadata/cassettes/MetadataServiceClientTest.test_query_success.yaml @@ -0,0 +1,36 @@ +interactions: +- request: + body: '{}' + headers: + accept: + - '*/*' + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '2' + content-type: + - application/json + host: + - metadata-service + user-agent: + - python-httpx/0.27.0 + method: POST + uri: http://metadata-service/api/v0/graph/search + response: + content: '[]' + headers: + Connection: + - keep-alive + Content-Length: + - '9' + Content-Type: + - application/json + Date: + - Mon, 08 Apr 2024 09:45:47 GMT + Server: + - nginx/1.18.0 (Ubuntu) + http_version: HTTP/1.1 + status_code: 200 +version: 1 diff --git a/app/clients/metadata/metadata_service_client.py b/app/clients/metadata/metadata_service_client.py index d367035..a1735a6 100644 --- a/app/clients/metadata/metadata_service_client.py +++ b/app/clients/metadata/metadata_service_client.py @@ -1,11 +1,18 @@ from typing import List -from app.clients.metadata.model import Triple +from dataclasses import dataclass + + +@dataclass +class Triple: + subject: str + relation: str + object: str class MetadataServiceClient: async def query(self, host_and_port: str, sparql: str) -> List[Triple]: raise NotImplementedError - async def insert(self, host_and_port: str, graph: str) -> None: + async def insert(self, host_and_port: str, message: str) -> None: raise NotImplementedError diff --git a/app/clients/metadata/metadata_service_client_impl.py b/app/clients/metadata/metadata_service_client_impl.py new file mode 100644 index 0000000..ffeddf9 --- /dev/null +++ b/app/clients/metadata/metadata_service_client_impl.py @@ -0,0 +1,46 @@ +from typing import List + +import httpx +from httpx import HTTPError + +from app.clients.metadata.metadata_service_client import MetadataServiceClient, Triple +from app.clients.metadata.metadata_service_settings import MetadataServiceSettings + + +class ClientError(Exception): + pass + + +class MetadataServiceClientImpl(MetadataServiceClient): + settings: MetadataServiceSettings + + def __init__(self, settings: MetadataServiceSettings): + self.settings = settings + + async def query(self, host_and_port: str, sparql: str) -> List[Triple]: + url = f"http://{host_and_port}/api/v0/graph/search" + async with httpx.AsyncClient() 
as client: + try: + response = await client.post( + url, + content=sparql, + headers=[("Content-Type", "application/json")], + ) + response.raise_for_status() + # TODO parse response + return [] + except HTTPError as e: + raise ClientError(e.args[0]) from e + + async def insert(self, host_and_port: str, graph: str) -> None: + url = f"http://{host_and_port}/api/v0/graph" + async with httpx.AsyncClient() as client: + try: + response = await client.patch( + url, + content=graph, + headers=[("Content-Type", "application/json")], + ) + response.raise_for_status() + except HTTPError as e: + raise ClientError(e.args[0]) from e diff --git a/app/clients/metadata/metadata_service_settings.py b/app/clients/metadata/metadata_service_settings.py new file mode 100644 index 0000000..184bd8b --- /dev/null +++ b/app/clients/metadata/metadata_service_settings.py @@ -0,0 +1,6 @@ +from pydantic_settings import BaseSettings + + +class MetadataServiceSettings(BaseSettings): + metadata_service_url: str = "metadata-service" + metadata_service_push_period_sec: int = 60 diff --git a/app/clients/metadata/mock_metadata_service_client.py b/app/clients/metadata/mock_metadata_service_client.py new file mode 100644 index 0000000..1f8031c --- /dev/null +++ b/app/clients/metadata/mock_metadata_service_client.py @@ -0,0 +1,49 @@ +from typing import Dict, List, Optional, TypeAlias + +from app.clients.metadata.metadata_service_client import MetadataServiceClient, Triple + +HostId: TypeAlias = str +SparQLQuery: TypeAlias = str +SerializedGraph: TypeAlias = str + + +class HostInteractions: + query_to_response: Dict[SparQLQuery, List[Triple]] + inserts: List[SerializedGraph] + + def add_query(self, sparql: SparQLQuery, result: List[Triple]) -> None: + self.query_to_response[sparql] = result + + def get_query_result(self, sparql: SparQLQuery) -> Optional[List[Triple]]: + return self.query_to_response.get(sparql) + + def add_insert(self, result: SerializedGraph) -> None: + self.inserts.append(result) + + def get_inserts(self) -> List[SerializedGraph]: + return self.inserts + + +class MockMetadataServiceClient(MetadataServiceClient): + hosts: Dict[HostId, HostInteractions] + + def mock_query( + self, host: HostId, sparql: SparQLQuery, result: List[Triple] + ) -> None: + if host not in self.hosts: + self.hosts[host] = HostInteractions() + self.hosts[host].add_query(sparql, result) + + async def query(self, host_and_port: HostId, sparql: SparQLQuery) -> List[Triple]: + host_queries = self.hosts.get(host_and_port) + if host_queries: + return host_queries.get_query_result(sparql) or [] + return [] + + def get_inserts(self, host: HostId) -> List[SerializedGraph]: + return self.hosts[host].get_inserts() + + async def insert(self, host_and_port: HostId, message: SerializedGraph) -> None: + if host_and_port not in self.hosts: + self.hosts[host_and_port] = HostInteractions() + self.hosts[host_and_port].add_insert(message) diff --git a/app/clients/metadata/model.py b/app/clients/metadata/model.py deleted file mode 100644 index d491160..0000000 --- a/app/clients/metadata/model.py +++ /dev/null @@ -1,2 +0,0 @@ -class Triple: - pass diff --git a/app/clients/metadata/test_metadata_service.py b/app/clients/metadata/test_metadata_service.py deleted file mode 100644 index 3a3abb0..0000000 --- a/app/clients/metadata/test_metadata_service.py +++ /dev/null @@ -1,9 +0,0 @@ -from unittest import TestCase - - -class MetadataServiceClientTest(TestCase): - def test_query(self): - pass - - def test_insert(self): - pass diff --git 
a/app/clients/metadata/test_metadata_service_client_impl.py b/app/clients/metadata/test_metadata_service_client_impl.py new file mode 100644 index 0000000..181c9c6 --- /dev/null +++ b/app/clients/metadata/test_metadata_service_client_impl.py @@ -0,0 +1,32 @@ +import asyncio + +from vcr.unittest import VCRTestCase + +from app.clients.metadata.metadata_service_client_impl import ( + ClientError, + MetadataServiceClientImpl, +) +from app.clients.metadata.metadata_service_settings import MetadataServiceSettings + + +class MetadataServiceClientTest(VCRTestCase): + def test_insert_success(self): + settings = MetadataServiceSettings() + client = MetadataServiceClientImpl(settings) + asyncio.run(client.insert("metadata-service", "{}")) + + def test_insert_failure(self): + settings = MetadataServiceSettings() + client = MetadataServiceClientImpl(settings) + with self.assertRaises(ClientError) as e: + asyncio.run(client.insert("metadata-service", "{}")) + msg = ( + "Client error '404 Not Found' for url 'http://metadata-service/api/v0/graph'\n" + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404" + ) + self.assertEqual(msg, str(e.exception)) + + def test_query_success(self): + settings = MetadataServiceSettings() + client = MetadataServiceClientImpl(settings) + asyncio.run(client.query("metadata-service", "query")) diff --git a/pyproject.toml b/pyproject.toml index 4b3a611..8cfff7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ pre-commit = "^3.7.0" [tool.poetry.group.test.dependencies] pytest = "^7.4" pytest-mock = "^3.12.0" +vcrpy = "6.0.1" [tool.isort] # https://github.com/timothycrosley/isort/ From 33a3be8274224e1119170db370895f40e0ecba2d Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 31 May 2024 14:25:23 +0200 Subject: [PATCH 14/61] HHT-669: core processing --- .../__init__.py | 0 ...ServiceClientTest.test_insert_failure.yaml | 0 ...ServiceClientTest.test_insert_success.yaml | 0 ...aServiceClientTest.test_query_success.yaml | 0 .../metadata_service_client.py | 0 .../metadata_service_client_impl.py | 9 ++- .../metadata_service_settings.py | 0 .../mock_metadata_service_client.py | 18 ++++- .../test_metadata_service_client_impl.py | 6 +- app/context.py | 37 ++++++++-- app/core/__fixture__/simple_node.jsonld | 50 +++++++++++++ app/core/async_queue.py | 27 +++++++ app/core/dkg_slice_store.py | 32 ++------ app/core/influxdb_repository.py | 52 +++++++------ app/core/kg_builder.py | 64 ++++++++++++++-- app/core/kg_repository.py | 44 +++++++---- app/core/kg_result_parser.py | 9 +++ app/core/kg_slice_assembler.py | 21 ++++++ app/core/kg_tuple_parser.py | 11 +++ app/core/kg_updater.py | 32 ++++++-- app/core/slice_for_node_strategy.py | 11 +++ app/core/slice_strategy.py | 9 +++ app/core/test_async_queue.py | 23 ++++++ app/core/test_graph_fixture.py | 73 +++++++++++++++++++ app/core/test_influxdb_repository.py | 59 +++++++++++++++ app/core/test_kg_builder.py | 67 +++++++++++++++++ app/core/test_kg_repository.py | 37 ++++++++++ app/core/test_kg_tuple_parser.py | 11 +++ app/core/test_kg_updater.py | 57 +++++++++++++++ app/core/types.py | 36 +++++++++ app/k8s_transform/upper_ontology_base.py | 3 + app/kg/inmemory_graph.py | 26 +++++++ app/kg/test_inmemory_graph.py | 31 ++++++++ app/main.py | 26 +++++++ app/serialize/graph_serializer.py | 7 +- app/test_context.py | 22 ++++++ pyproject.toml | 1 + 37 files changed, 821 insertions(+), 90 deletions(-) rename app/clients/{metadata => metadata_service}/__init__.py (100%) rename app/clients/{metadata => 
metadata_service}/cassettes/MetadataServiceClientTest.test_insert_failure.yaml (100%) rename app/clients/{metadata => metadata_service}/cassettes/MetadataServiceClientTest.test_insert_success.yaml (100%) rename app/clients/{metadata => metadata_service}/cassettes/MetadataServiceClientTest.test_query_success.yaml (100%) rename app/clients/{metadata => metadata_service}/metadata_service_client.py (100%) rename app/clients/{metadata => metadata_service}/metadata_service_client_impl.py (87%) rename app/clients/{metadata => metadata_service}/metadata_service_settings.py (100%) rename app/clients/{metadata => metadata_service}/mock_metadata_service_client.py (79%) rename app/clients/{metadata => metadata_service}/test_metadata_service_client_impl.py (86%) create mode 100644 app/core/__fixture__/simple_node.jsonld create mode 100644 app/core/async_queue.py create mode 100644 app/core/kg_result_parser.py create mode 100644 app/core/kg_slice_assembler.py create mode 100644 app/core/kg_tuple_parser.py create mode 100644 app/core/slice_for_node_strategy.py create mode 100644 app/core/slice_strategy.py create mode 100644 app/core/test_async_queue.py create mode 100644 app/core/test_graph_fixture.py create mode 100644 app/core/test_influxdb_repository.py create mode 100644 app/core/test_kg_builder.py create mode 100644 app/core/test_kg_repository.py create mode 100644 app/core/test_kg_tuple_parser.py create mode 100644 app/core/test_kg_updater.py create mode 100644 app/core/types.py create mode 100644 app/main.py create mode 100644 app/test_context.py diff --git a/app/clients/metadata/__init__.py b/app/clients/metadata_service/__init__.py similarity index 100% rename from app/clients/metadata/__init__.py rename to app/clients/metadata_service/__init__.py diff --git a/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_failure.yaml b/app/clients/metadata_service/cassettes/MetadataServiceClientTest.test_insert_failure.yaml similarity index 100% rename from app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_failure.yaml rename to app/clients/metadata_service/cassettes/MetadataServiceClientTest.test_insert_failure.yaml diff --git a/app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_success.yaml b/app/clients/metadata_service/cassettes/MetadataServiceClientTest.test_insert_success.yaml similarity index 100% rename from app/clients/metadata/cassettes/MetadataServiceClientTest.test_insert_success.yaml rename to app/clients/metadata_service/cassettes/MetadataServiceClientTest.test_insert_success.yaml diff --git a/app/clients/metadata/cassettes/MetadataServiceClientTest.test_query_success.yaml b/app/clients/metadata_service/cassettes/MetadataServiceClientTest.test_query_success.yaml similarity index 100% rename from app/clients/metadata/cassettes/MetadataServiceClientTest.test_query_success.yaml rename to app/clients/metadata_service/cassettes/MetadataServiceClientTest.test_query_success.yaml diff --git a/app/clients/metadata/metadata_service_client.py b/app/clients/metadata_service/metadata_service_client.py similarity index 100% rename from app/clients/metadata/metadata_service_client.py rename to app/clients/metadata_service/metadata_service_client.py diff --git a/app/clients/metadata/metadata_service_client_impl.py b/app/clients/metadata_service/metadata_service_client_impl.py similarity index 87% rename from app/clients/metadata/metadata_service_client_impl.py rename to app/clients/metadata_service/metadata_service_client_impl.py index ffeddf9..8693696 
100644 --- a/app/clients/metadata/metadata_service_client_impl.py +++ b/app/clients/metadata_service/metadata_service_client_impl.py @@ -3,8 +3,13 @@ import httpx from httpx import HTTPError -from app.clients.metadata.metadata_service_client import MetadataServiceClient, Triple -from app.clients.metadata.metadata_service_settings import MetadataServiceSettings +from app.clients.metadata_service.metadata_service_client import ( + MetadataServiceClient, + Triple, +) +from app.clients.metadata_service.metadata_service_settings import ( + MetadataServiceSettings, +) class ClientError(Exception): diff --git a/app/clients/metadata/metadata_service_settings.py b/app/clients/metadata_service/metadata_service_settings.py similarity index 100% rename from app/clients/metadata/metadata_service_settings.py rename to app/clients/metadata_service/metadata_service_settings.py diff --git a/app/clients/metadata/mock_metadata_service_client.py b/app/clients/metadata_service/mock_metadata_service_client.py similarity index 79% rename from app/clients/metadata/mock_metadata_service_client.py rename to app/clients/metadata_service/mock_metadata_service_client.py index 1f8031c..aa4b45b 100644 --- a/app/clients/metadata/mock_metadata_service_client.py +++ b/app/clients/metadata_service/mock_metadata_service_client.py @@ -1,6 +1,9 @@ from typing import Dict, List, Optional, TypeAlias -from app.clients.metadata.metadata_service_client import MetadataServiceClient, Triple +from app.clients.metadata_service.metadata_service_client import ( + MetadataServiceClient, + Triple, +) HostId: TypeAlias = str SparQLQuery: TypeAlias = str @@ -11,6 +14,10 @@ class HostInteractions: query_to_response: Dict[SparQLQuery, List[Triple]] inserts: List[SerializedGraph] + def __init__(self): + self.query_to_response = dict() + self.inserts = [] + def add_query(self, sparql: SparQLQuery, result: List[Triple]) -> None: self.query_to_response[sparql] = result @@ -27,6 +34,9 @@ def get_inserts(self) -> List[SerializedGraph]: class MockMetadataServiceClient(MetadataServiceClient): hosts: Dict[HostId, HostInteractions] + def __init__(self): + self.hosts = dict() + def mock_query( self, host: HostId, sparql: SparQLQuery, result: List[Triple] ) -> None: @@ -41,7 +51,11 @@ async def query(self, host_and_port: HostId, sparql: SparQLQuery) -> List[Triple return [] def get_inserts(self, host: HostId) -> List[SerializedGraph]: - return self.hosts[host].get_inserts() + host_interactions = self.hosts.get(host) + if host_interactions: + return host_interactions.get_inserts() + else: + return [] async def insert(self, host_and_port: HostId, message: SerializedGraph) -> None: if host_and_port not in self.hosts: diff --git a/app/clients/metadata/test_metadata_service_client_impl.py b/app/clients/metadata_service/test_metadata_service_client_impl.py similarity index 86% rename from app/clients/metadata/test_metadata_service_client_impl.py rename to app/clients/metadata_service/test_metadata_service_client_impl.py index 181c9c6..3bfddc3 100644 --- a/app/clients/metadata/test_metadata_service_client_impl.py +++ b/app/clients/metadata_service/test_metadata_service_client_impl.py @@ -2,11 +2,13 @@ from vcr.unittest import VCRTestCase -from app.clients.metadata.metadata_service_client_impl import ( +from app.clients.metadata_service.metadata_service_client_impl import ( ClientError, MetadataServiceClientImpl, ) -from app.clients.metadata.metadata_service_settings import MetadataServiceSettings +from app.clients.metadata_service.metadata_service_settings 
import ( + MetadataServiceSettings, +) class MetadataServiceClientTest(VCRTestCase): diff --git a/app/context.py b/app/context.py index 026c737..4244871 100644 --- a/app/context.py +++ b/app/context.py @@ -1,40 +1,61 @@ +from typing import Any, List + import asyncio from app.clients.influxdb.influxdb_client import InfluxDBClient from app.clients.k8s.k8s_client import K8SClient -from app.clients.metadata.metadata_service_client import MetadataServiceClient +from app.clients.metadata_service.metadata_service_client import MetadataServiceClient +from app.core.async_queue import AsyncQueue from app.core.dkg_slice_store import DKGSliceStore from app.core.influxdb_repository import InfluxDBRepository -from app.core.k8s_repository import K8SRepository from app.core.kg_builder import KGBuilder from app.core.kg_repository import KGRepository from app.core.kg_updater import KGUpdater +from app.core.types import DKGSlice +from app.serialize.jsonld_configuration import JsonLDConfiguration class KGExporterContext: builder: KGBuilder updater: KGUpdater + queue: AsyncQueue[DKGSlice] runner: asyncio.Runner dkg_slice_store: DKGSliceStore + running: asyncio.Event + tasks: List[asyncio.Task[Any]] def __init__( self, metadata_client: MetadataServiceClient, k8s_client: K8SClient, influxdb_client: InfluxDBClient, + jsonld_config: JsonLDConfiguration, ): - kg_repository = KGRepository(metadata_client) - k8s_repository = K8SRepository(k8s_client) + kg_repository = KGRepository(metadata_client, jsonld_config) influxdb_repository = InfluxDBRepository(influxdb_client) + self.running = asyncio.Event() + self.queue = AsyncQueue[DKGSlice]() self.dkg_slice_store = DKGSliceStore() self.builder = KGBuilder( - self.dkg_slice_store, k8s_repository, kg_repository, influxdb_repository + self.running, + self.queue, + k8s_client, + kg_repository, + influxdb_repository, ) - self.updater = KGUpdater(self.dkg_slice_store, kg_repository) + self.updater = KGUpdater(self.running, self.queue, kg_repository) self.runner = asyncio.Runner() + self.tasks = [] def start(self) -> None: - pass + if self.running.is_set(): + return + self.running.set() + self.runner.run(self.run_tasks()) + + async def run_tasks(self) -> None: + self.tasks.append(asyncio.create_task(self.builder.run())) + self.tasks.append(asyncio.create_task(self.updater.run())) def stop(self) -> None: - pass + self.running.clear() diff --git a/app/core/__fixture__/simple_node.jsonld b/app/core/__fixture__/simple_node.jsonld new file mode 100644 index 0000000..30cae3a --- /dev/null +++ b/app/core/__fixture__/simple_node.jsonld @@ -0,0 +1,50 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "glc:CPU.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Capacity" + }, + { + "@id": "glc:Core", + "@type": "glc:MeasurementUnit", + "glc:hasDescription": "Cores", + "glc:hasID": "glc:Core" + }, + { + "@id": "glc:ResourceSpecification", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "Test", + "glc:hasID": "glc:ResourceSpecification", + "glc:makes": { + "@id": "glc:measurement1", + "@type": "glc:Measurement", + "glc:hasDescription": "CPU.MAX", + "glc:hasTimestamp": 1700000000, + "glc:hasValue": 42.0, + "glc:hasID": "glc:measurement1", + "glc:measuredIn": "glc:Core", + "glc:relatesToMeasurementProperty": "glc:CPU.Capacity" + } + }, + 
{ + "@id": "glc:cpu", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "CPU", + "glc:hasID": "glc:cpu" + }, + { + "@id": "glc:node1", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Node", + "glc:hasID": "glc:node1", + "glc:hasSubResource": "glc:cpu" + } + ] +} diff --git a/app/core/async_queue.py b/app/core/async_queue.py new file mode 100644 index 0000000..94863b5 --- /dev/null +++ b/app/core/async_queue.py @@ -0,0 +1,27 @@ +from typing import Generic, Optional, TypeVar + +import asyncio + +T = TypeVar("T") + + +class AsyncQueue(Generic[T]): + elements: asyncio.Queue[T] + + def __init__(self): + self.elements = asyncio.Queue() + + def get_nowait(self) -> Optional[T]: + try: + return self.elements.get_nowait() + except asyncio.QueueEmpty: + return None + + def put_nowait(self, element: T) -> None: + self.elements.put_nowait(element) + + async def get(self) -> T: + return await self.elements.get() + + async def put(self, element: T) -> None: + await self.elements.put(element) diff --git a/app/core/dkg_slice_store.py b/app/core/dkg_slice_store.py index 9c5110b..39fe863 100644 --- a/app/core/dkg_slice_store.py +++ b/app/core/dkg_slice_store.py @@ -1,38 +1,18 @@ -from typing import Dict, List, Set +from typing import Dict -from dataclasses import dataclass - -from app.kg.graph import Graph - - -@dataclass -class KGSliceId: - node_ip: str - port: int - - -@dataclass -class DKGSlice: - slice_id: KGSliceId - graph: Graph - timestamp: int +from app.core.types import DKGSlice, KGSliceId class DKGSliceStore: slices: Dict[KGSliceId, DKGSlice] - updates: Set[KGSliceId] def __init__(self): self.slices = {} - self.updates = set() - def update(self, slice: DKGSlice) -> None: + def update(self, slice: DKGSlice) -> bool: existing = self.slices.get(slice.slice_id) if existing != slice: self.slices[slice.slice_id] = slice - self.updates.add(slice.slice_id) - - def drain_updates(self) -> List[DKGSlice]: - to_consume = self.updates - self.updates = set() - return [self.slices[slice_id] for slice_id in to_consume] + return True + else: + return False diff --git a/app/core/influxdb_repository.py b/app/core/influxdb_repository.py index d8fb168..cca5959 100644 --- a/app/core/influxdb_repository.py +++ b/app/core/influxdb_repository.py @@ -1,25 +1,33 @@ -from typing import List, Set +from typing import List -from dataclasses import dataclass, field +import asyncio +from dataclasses import dataclass +from enum import StrEnum from app.clients.influxdb.influxdb_client import InfluxDBClient +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.query_result_parser import QueryResultParser +from app.clients.influxdb.simple_result_parser import SimpleResultParser + + +class ResultParserId(StrEnum): + SIMPLE_RESULT_PARSER = "SimpleResultParser" + + def get_by_name(self) -> QueryResultParser: + if self.name == "SIMPLE_RESULT_PARSER": + return SimpleResultParser() + else: + raise Exception(f"Unknown parser for {self}") @dataclass class MetricQuery: measurement_name: str query: str - result_parser: str + result_parser: ResultParserId source: str -@dataclass -class QueryOptions: - pod_metric_queries: List[MetricQuery] - node_metric_queries: List[MetricQuery] - workload_metric_queries: List[MetricQuery] - - @dataclass class Metric: identifier: str @@ -31,23 +39,19 @@ class Metric: source: str -@dataclass -class MetricsSnapshot: - pod_metrics: Set[Metric] = field(default_factory=set) - node_metrics: Set[Metric] = field(default_factory=set) - deployment_metrics: 
Set[Metric] = field(default_factory=set) - - class InfluxDBRepository: client: InfluxDBClient def __init__(self, client: InfluxDBClient): self.client = client - async def fetch( - self, timestamp: int, query_options: QueryOptions - ) -> MetricsSnapshot: - # query_api = self.client.query_api() - # query = "" - # result = await query_api.query(query) - return MetricsSnapshot() + async def query_many( + self, now: int, queries: List[MetricQuery] + ) -> List[MetricValue]: + query_futures = [self.query_one(now, query) for query in queries] + query_results: List[List[MetricValue]] = await asyncio.gather(*query_futures) + return [element for elements in query_results for element in elements] + + async def query_one(self, now: int, query: MetricQuery) -> List[MetricValue]: + result_parser = query.result_parser.get_by_name() + return await self.client.query(query.query, result_parser) diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index cac77a1..21c7ac5 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -1,26 +1,80 @@ +import asyncio + +from loguru import logger + from app.clients.k8s.k8s_client import K8SClient -from app.core.dkg_slice_store import DKGSliceStore +from app.core.async_queue import AsyncQueue from app.core.influxdb_repository import InfluxDBRepository from app.core.kg_repository import KGRepository +from app.core.kg_slice_assembler import KGSliceAssembler +from app.core.slice_for_node_strategy import SliceForNodeStrategy +from app.core.slice_strategy import SliceStrategy +from app.core.types import DKGSlice, QueryOptions class KGBuilder: - dkg_slice_store: DKGSliceStore + running: asyncio.Event k8s_client: K8SClient + queue: AsyncQueue[DKGSlice] kg_repository: KGRepository influxdb_repository: InfluxDBRepository + query_options: QueryOptions + slice_strategy: SliceStrategy + slice_assembler: KGSliceAssembler def __init__( self, - dkg_slice_store: DKGSliceStore, + running: asyncio.Event, + queue: AsyncQueue[DKGSlice], k8s_client: K8SClient, kg_repository: KGRepository, influxdb_repository: InfluxDBRepository, ): - self.dkg_slice_store = dkg_slice_store + self.running = running self.k8s_client = k8s_client + self.queue = queue self.kg_repository = kg_repository self.influxdb_repository = influxdb_repository + self.query_options = QueryOptions( + pod_queries=[], node_queries=[], workload_queries=[] + ) + self.slice_strategy = SliceForNodeStrategy() + self.slice_assembler = KGSliceAssembler() async def run(self) -> None: - pass + while self.running.is_set(): + now = 0 + ( + cluster_snapshot, + pod_metrics, + node_metrics, + workload_metrics, + ) = await asyncio.gather( + self.k8s_client.fetch_snapshot(), + self.influxdb_repository.query_many( + now, self.query_options.pod_queries + ), + self.influxdb_repository.query_many( + now, self.query_options.node_queries + ), + self.influxdb_repository.query_many( + now, self.query_options.workload_queries + ), + ) + logger.debug(cluster_snapshot) + logger.debug(pod_metrics) + logger.debug(node_metrics) + logger.debug(workload_metrics) + + slice_ids = self.slice_strategy.get_slices(cluster_snapshot) + for slice_id in slice_ids: + slice = self.slice_assembler.assemble( + now=now, + slice_id=slice_id, + cluster_snapshot=cluster_snapshot, + pod_metrics=pod_metrics, + node_metrics=node_metrics, + workload_metrics=workload_metrics, + ) + self.queue.put_nowait(slice) + await asyncio.sleep(30) diff --git a/app/core/kg_repository.py b/app/core/kg_repository.py index 86ca5bc..bcc676f 100644 --- a/app/core/kg_repository.py +++ 
b/app/core/kg_repository.py @@ -1,23 +1,37 @@ -from dataclasses import dataclass +from io import StringIO -from app.clients.metadata.metadata_service_client import MetadataServiceClient +from app.clients.metadata_service.metadata_service_client import MetadataServiceClient +from app.core.kg_result_parser import KGResultParser +from app.core.types import KGSliceId from app.kg.graph import Graph -from app.kg.inmemory_graph import InMemoryGraph - - -@dataclass -class ExistingResources: - pass +from app.serialize.jsonld_configuration import JsonLDConfiguration +from app.serialize.jsonld_serializer import JsonLDSerialializer class KGRepository: metadata_client: MetadataServiceClient + jsonld_config: JsonLDConfiguration - def __init__(self, metadata_client: MetadataServiceClient): + def __init__( + self, metadata_client: MetadataServiceClient, jsonld_config: JsonLDConfiguration + ): self.metadata_client = metadata_client - - async def query(self, query: str) -> Graph: - return InMemoryGraph() - - async def update(self, graph: Graph) -> None: - pass + self.jsonld_config = jsonld_config + + async def query( + self, slice_id: KGSliceId, query: str, result_parser: KGResultParser + ) -> Graph: + host_and_port = slice_id.get_host_port() + result = await self.metadata_client.query(host_and_port, query) + return result_parser.parse(result) + + async def update(self, slice_id: KGSliceId, graph: Graph) -> None: + graph_str = self.to_jsonld(graph) + host_and_port = slice_id.get_host_port() + await self.metadata_client.insert(host_and_port, graph_str) + + def to_jsonld(self, graph: Graph) -> str: + serializer = JsonLDSerialializer(self.jsonld_config) + out = StringIO() + serializer.write(out, graph) + return out.getvalue() diff --git a/app/core/kg_result_parser.py b/app/core/kg_result_parser.py new file mode 100644 index 0000000..cd269b9 --- /dev/null +++ b/app/core/kg_result_parser.py @@ -0,0 +1,9 @@ +from typing import List + +from app.clients.metadata_service.metadata_service_client import Triple +from app.kg.graph import Graph + + +class KGResultParser: + def parse(self, result: List[Triple]) -> Graph: + raise NotImplementedError diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py new file mode 100644 index 0000000..d889e34 --- /dev/null +++ b/app/core/kg_slice_assembler.py @@ -0,0 +1,21 @@ +from typing import List + +from app.clients.influxdb.metric_value import MetricValue +from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.types import DKGSlice, KGSliceId +from app.kg.inmemory_graph import InMemoryGraph + + +class KGSliceAssembler: + def assemble( + self, + now: int, + slice_id: KGSliceId, + cluster_snapshot: ResourceSnapshot, + pod_metrics: List[MetricValue], + node_metrics: List[MetricValue], + workload_metrics: List[MetricValue], + ) -> DKGSlice: + graph = InMemoryGraph() + slice = DKGSlice(slice_id, graph, now) + return slice diff --git a/app/core/kg_tuple_parser.py b/app/core/kg_tuple_parser.py new file mode 100644 index 0000000..e3b1e6d --- /dev/null +++ b/app/core/kg_tuple_parser.py @@ -0,0 +1,11 @@ +from typing import List + +from app.clients.metadata_service.metadata_service_client import Triple +from app.core.kg_result_parser import KGResultParser +from app.kg.graph import Graph +from app.kg.inmemory_graph import InMemoryGraph + + +class KGTupleParser(KGResultParser): + def parse(self, result: List[Triple]) -> Graph: + return InMemoryGraph() diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index 67e2ba9..6938bd5 100644 --- 
a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -1,14 +1,36 @@ -from app.core.dkg_slice_store import DKGSliceStore +import asyncio + +from loguru import logger + +from app.core.async_queue import AsyncQueue from app.core.kg_repository import KGRepository +from app.core.types import DKGSlice class KGUpdater: - dkg_slice_store: DKGSliceStore + queue: AsyncQueue[DKGSlice] kg_repository: KGRepository + running: asyncio.Event - def __init__(self, dkg_slice_store: DKGSliceStore, kg_repository: KGRepository): - self.dkg_slice_store = dkg_slice_store + def __init__( + self, + running: asyncio.Event, + queue: AsyncQueue[DKGSlice], + kg_repository: KGRepository, + ): + self.queue = queue self.kg_repository = kg_repository + self.running = running async def run(self) -> None: - pass + while self.running.is_set(): + slice = self.queue.get_nowait() + if slice: + logger.debug( + "updating slice {slice}, with timestamp {timestamp}", + slice=slice.slice_id, + timestamp=slice.timestamp, + ) + await self.kg_repository.update(slice.slice_id, slice.graph) + else: + await asyncio.sleep(0.3) diff --git a/app/core/slice_for_node_strategy.py b/app/core/slice_for_node_strategy.py new file mode 100644 index 0000000..d3432ea --- /dev/null +++ b/app/core/slice_for_node_strategy.py @@ -0,0 +1,11 @@ +from typing import List + +from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.slice_strategy import SliceStrategy +from app.core.types import KGSliceId + + +class SliceForNodeStrategy(SliceStrategy): + def get_slices(self, snapshot: ResourceSnapshot) -> List[KGSliceId]: + slice_id = KGSliceId("127.0.0.1", 80) + return [slice_id] diff --git a/app/core/slice_strategy.py b/app/core/slice_strategy.py new file mode 100644 index 0000000..d64552a --- /dev/null +++ b/app/core/slice_strategy.py @@ -0,0 +1,9 @@ +from typing import List + +from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.types import KGSliceId + + +class SliceStrategy: + def get_slices(self, snapshot: ResourceSnapshot) -> List[KGSliceId]: + raise NotImplementedError diff --git a/app/core/test_async_queue.py b/app/core/test_async_queue.py new file mode 100644 index 0000000..ed6c4b7 --- /dev/null +++ b/app/core/test_async_queue.py @@ -0,0 +1,23 @@ +import asyncio +from unittest import TestCase + +from app.core.async_queue import AsyncQueue + + +class AsyncQueueTest(TestCase): + def test_nowait(self) -> None: + queue = AsyncQueue[int]() + self.assertEqual(None, queue.get_nowait()) + queue.put_nowait(1) + queue.put_nowait(2) + self.assertEqual(1, queue.get_nowait()) + self.assertEqual(2, queue.get_nowait()) + self.assertEqual(None, queue.get_nowait()) + + def test_wait(self) -> None: + runner = asyncio.Runner() + queue = AsyncQueue[int]() + queue.put_nowait(1) + queue.put_nowait(2) + self.assertEqual(1, runner.run(queue.get())) + self.assertEqual(2, runner.run(queue.get())) diff --git a/app/core/test_graph_fixture.py b/app/core/test_graph_fixture.py new file mode 100644 index 0000000..319278b --- /dev/null +++ b/app/core/test_graph_fixture.py @@ -0,0 +1,73 @@ +from typing import Any, Dict, Tuple + +import json +from io import FileIO + +from app.k8s_transform.upper_ontology_base import UpperOntologyBase +from app.kg.graph import Graph +from app.kg.id_base import IdBase +from app.kg.inmemory_graph import InMemoryGraph +from app.kg.iri import IRI +from app.serialize.jsonld_configuration import JsonLDConfiguration + + +class TestTransformer(UpperOntologyBase): + def __init__(self, graph: Graph): + super().__init__(graph) + + 
+class TestGraphFixture: + def simple_node(self) -> Tuple[Graph, str]: + graph = InMemoryGraph() + node1 = IRI("glc", "node1") + cpu = IRI("glc", "cpu") + measurement_id = IRI("glc", "measurement1") + + transformer = TestTransformer(graph) + transformer.add_work_producing_resource(node1, "Node") + transformer.add_work_producing_resource(cpu, "CPU") + transformer.add_subresource(node1, cpu) + transformer.add_unit(UpperOntologyBase.UNIT_CPU_CORE_ID, "Cores") + transformer.add_measurement_property( + UpperOntologyBase.PROPERTY_CPU_CAPACITY, None + ) + transformer.add_measuring_resource( + UpperOntologyBase.MEASURING_RESOURCE_NODE_K8S_SPEC_ID, "Test" + ) + transformer.add_measurement( + measurement_id, + "CPU.MAX", + 42.0, + 1700000000, + UpperOntologyBase.UNIT_CPU_CORE_ID, + UpperOntologyBase.PROPERTY_CPU_CAPACITY, + UpperOntologyBase.MEASURING_RESOURCE_NODE_K8S_SPEC_ID, + ) + + serialized = self.load_json("simple_node") + return graph, json.dumps(serialized) + + def get_jsonld_config(self) -> JsonLDConfiguration: + contexts: Dict[IdBase, Dict[str, Any]] = { + JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + } + } + return JsonLDConfiguration( + contexts, + { + UpperOntologyBase.WORK_PRODUCING_RESOURCE, + UpperOntologyBase.NON_WORK_PRODUCING_RESOURCE, + UpperOntologyBase.ASPECT, + UpperOntologyBase.MEASUREMENT_PROPERTY, + UpperOntologyBase.MEASURING_RESOURCE, + UpperOntologyBase.MEASUREMENT_UNIT, + }, + ) + + def load_json(self, name: str) -> Dict[str, Any]: + with FileIO(f"app/core/__fixture__/{name}.jsonld") as f: + return json.load(f) # type: ignore diff --git a/app/core/test_influxdb_repository.py b/app/core/test_influxdb_repository.py new file mode 100644 index 0000000..7ccfdd9 --- /dev/null +++ b/app/core/test_influxdb_repository.py @@ -0,0 +1,59 @@ +import asyncio +from unittest import TestCase + +from app.clients.influxdb.metric_value import MetricValue +from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient +from app.core.influxdb_repository import InfluxDBRepository, MetricQuery, ResultParserId + + +class InfluxDBRepositoryTest(TestCase): + client: MockInfluxDBClient + repository: InfluxDBRepository + + def setUp(self) -> None: + self.client = MockInfluxDBClient() + self.repository = InfluxDBRepository(self.client) + + def test_query_one(self) -> None: + expected = MetricValue("id", 100500, 42.0) + self.client.mock_query("test_query", [expected]) + now = 1 + query = MetricQuery( + measurement_name="measurement", + query="test_query", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + source="source", + ) + + actual = asyncio.run(self.repository.query_one(now, query)) + self.assertEqual([expected], actual) + + def test_query_many(self) -> None: + expected1 = MetricValue("id1", 100500, 41.0) + expected2 = MetricValue("id2", 100500, 42.0) + expected3 = MetricValue("id3", 100500, 43.0) + self.client.mock_query("test_query1", [expected1]) + self.client.mock_query("test_query2", [expected2]) + self.client.mock_query("test_query3", [expected3]) + now = 1 + query1 = MetricQuery( + measurement_name="measurement", + query="test_query1", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + source="source", + ) + query2 = MetricQuery( + measurement_name="measurement", + query="test_query2", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + 
source="source", + ) + query3 = MetricQuery( + measurement_name="measurement", + query="test_query3", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + source="source", + ) + + actual = asyncio.run(self.repository.query_many(now, [query1, query2, query3])) + self.assertEqual([expected1, expected2, expected3], actual) diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py new file mode 100644 index 0000000..fa2bb3e --- /dev/null +++ b/app/core/test_kg_builder.py @@ -0,0 +1,67 @@ +import asyncio +import datetime +from unittest import TestCase + +from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient +from app.clients.k8s.mock_k8s_client import MockK8SClient +from app.clients.metadata_service.mock_metadata_service_client import ( + MockMetadataServiceClient, +) +from app.core.async_queue import AsyncQueue +from app.core.influxdb_repository import InfluxDBRepository +from app.core.kg_builder import KGBuilder +from app.core.kg_repository import KGRepository +from app.core.test_graph_fixture import TestGraphFixture +from app.core.types import DKGSlice, KGSliceId +from app.kg.inmemory_graph import InMemoryGraph + + +class KGBuilderTest(TestCase, TestGraphFixture): + client: MockMetadataServiceClient + k8s_client: MockK8SClient + influxdb_client: MockInfluxDBClient + queue: AsyncQueue[DKGSlice] + running_event: asyncio.Event + runner: asyncio.Runner + + def setUp(self) -> None: + self.client = MockMetadataServiceClient() + self.influxdb_client = MockInfluxDBClient() + self.queue = AsyncQueue() + self.k8s_client = MockK8SClient() + self.running_event = asyncio.Event() + self.running_event.set() + self.runner = asyncio.Runner() + + def test_build(self) -> None: + builder = self.create_builder() + self.runner.run(self.run_builder(builder)) + + slice = self.wait_for_slice(2) + + self.assertEqual(slice.graph, InMemoryGraph()) + self.assertEqual(slice.timestamp, 0) + self.assertEqual(slice.slice_id, KGSliceId("127.0.0.1", 80)) + + def wait_for_slice(self, seconds: int) -> DKGSlice: + start = datetime.datetime.now() + while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): + slice = self.queue.get_nowait() + if slice: + return slice + self.runner.run(asyncio.sleep(0.1)) + raise AssertionError("time is up.") + + def create_builder(self) -> KGBuilder: + repository = KGRepository(self.client, self.get_jsonld_config()) + influxdb_repository = InfluxDBRepository(self.influxdb_client) + return KGBuilder( + self.running_event, + self.queue, + self.k8s_client, + repository, + influxdb_repository, + ) + + async def run_builder(self, builder: KGBuilder) -> None: + asyncio.create_task(builder.run()) diff --git a/app/core/test_kg_repository.py b/app/core/test_kg_repository.py new file mode 100644 index 0000000..61c478d --- /dev/null +++ b/app/core/test_kg_repository.py @@ -0,0 +1,37 @@ +import asyncio +from unittest import TestCase + +from app.clients.metadata_service.mock_metadata_service_client import ( + MockMetadataServiceClient, +) +from app.core.kg_repository import KGRepository +from app.core.kg_tuple_parser import KGTupleParser +from app.core.test_graph_fixture import TestGraphFixture +from app.core.types import KGSliceId +from app.kg.inmemory_graph import InMemoryGraph + + +class KGRepositoryTest(TestCase, TestGraphFixture): + def test_update(self) -> None: + client = MockMetadataServiceClient() + repository = KGRepository(client, self.get_jsonld_config()) + slice_id = KGSliceId("127.0.0.1", 80) + + graph, expected = self.simple_node() + 
asyncio.run(repository.update(slice_id, graph)) + + graphs = client.get_inserts(slice_id.get_host_port()) + self.assertEqual(expected, graphs[0]) + + def test_query(self) -> None: + client = MockMetadataServiceClient() + repository = KGRepository(client, self.get_jsonld_config()) + slice_id = KGSliceId("127.0.0.1", 80) + query_str = "sparql query" + result_parser = KGTupleParser() + + client.mock_query(slice_id.get_host_port(), query_str, []) + + actual = asyncio.run(repository.query(slice_id, query_str, result_parser)) + expected = InMemoryGraph() + self.assertEqual(expected, actual) diff --git a/app/core/test_kg_tuple_parser.py b/app/core/test_kg_tuple_parser.py new file mode 100644 index 0000000..c2423f3 --- /dev/null +++ b/app/core/test_kg_tuple_parser.py @@ -0,0 +1,11 @@ +from unittest import TestCase + +from app.core.kg_tuple_parser import KGTupleParser +from app.kg.inmemory_graph import InMemoryGraph + + +class KGTupleParserTest(TestCase): + def test_parse_empty(self) -> None: + parser = KGTupleParser() + graph = parser.parse([]) + self.assertEqual(graph, InMemoryGraph()) diff --git a/app/core/test_kg_updater.py b/app/core/test_kg_updater.py new file mode 100644 index 0000000..0d98bb2 --- /dev/null +++ b/app/core/test_kg_updater.py @@ -0,0 +1,57 @@ +import asyncio +import datetime +from unittest import TestCase + +from app.clients.metadata_service.mock_metadata_service_client import ( + MockMetadataServiceClient, + SerializedGraph, +) +from app.core.async_queue import AsyncQueue +from app.core.kg_repository import KGRepository +from app.core.kg_updater import KGUpdater +from app.core.test_graph_fixture import TestGraphFixture +from app.core.types import DKGSlice, KGSliceId + + +class KGUpdaterTest(TestCase, TestGraphFixture): + client: MockMetadataServiceClient + queue: AsyncQueue[DKGSlice] + running_event: asyncio.Event + runner: asyncio.Runner + + def setUp(self) -> None: + self.client = MockMetadataServiceClient() + self.queue = AsyncQueue() + self.running_event = asyncio.Event() + self.running_event.set() + self.runner = asyncio.Runner() + + def test_kg_updater(self) -> None: + updater = self.create_updater() + self.runner.run(self.run_updater(updater)) + + graph, serialized = self.simple_node() + slice_id = KGSliceId("127.0.0.1", 80) + slice = DKGSlice(slice_id, graph, 1) + self.queue.put_nowait(slice) + + graph_str = self.wait_for_graph(slice_id, 5) + self.assertEqual(serialized, graph_str) + + self.running_event.clear() + + def wait_for_graph(self, slice_id: KGSliceId, seconds: int) -> SerializedGraph: + start = datetime.datetime.now() + while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): + graphs = self.client.get_inserts(slice_id.get_host_port()) + if len(graphs) > 0: + return graphs[0] + self.runner.run(asyncio.sleep(0.1)) + raise AssertionError("time is up.") + + def create_updater(self) -> KGUpdater: + repository = KGRepository(self.client, self.get_jsonld_config()) + return KGUpdater(self.running_event, self.queue, repository) + + async def run_updater(self, updater: KGUpdater) -> None: + asyncio.create_task(updater.run()) diff --git a/app/core/types.py b/app/core/types.py new file mode 100644 index 0000000..0bda2bf --- /dev/null +++ b/app/core/types.py @@ -0,0 +1,36 @@ +from typing import List, Set + +from dataclasses import dataclass, field + +from app.core.influxdb_repository import Metric, MetricQuery +from app.kg.graph import Graph + + +@dataclass +class KGSliceId: + node_ip: str + port: int + + def get_host_port(self) -> str: + return 
f"{self.node_ip}:{self.port}" + + +@dataclass +class DKGSlice: + slice_id: KGSliceId + graph: Graph + timestamp: int + + +@dataclass +class MetricsSnapshot: + pod_metrics: Set[Metric] = field(default_factory=set) + node_metrics: Set[Metric] = field(default_factory=set) + deployment_metrics: Set[Metric] = field(default_factory=set) + + +@dataclass +class QueryOptions: + pod_queries: List[MetricQuery] = field(default_factory=list) + node_queries: List[MetricQuery] = field(default_factory=list) + workload_queries: List[MetricQuery] = field(default_factory=list) diff --git a/app/k8s_transform/upper_ontology_base.py b/app/k8s_transform/upper_ontology_base.py index f40f160..458b09b 100644 --- a/app/k8s_transform/upper_ontology_base.py +++ b/app/k8s_transform/upper_ontology_base.py @@ -228,6 +228,9 @@ def add_work_producing_resource( ) -> None: self.add_common_info(identifier, self.WORK_PRODUCING_RESOURCE, description) + def add_subresource(self, identifier: IRI, subresource: IRI) -> None: + self.sink.add_relation(identifier, self.HAS_SUBRESOURCE, subresource) + def add_common_info( self, entity_id: IRI, entity_type: IRI, description: Optional[str] ) -> None: diff --git a/app/kg/inmemory_graph.py b/app/kg/inmemory_graph.py index 5bb624c..7474b9c 100644 --- a/app/kg/inmemory_graph.py +++ b/app/kg/inmemory_graph.py @@ -43,6 +43,16 @@ def get_properties(self) -> Dict[IRI, Any]: def get_meta_properties(self) -> Dict[IRI, IdBase]: return self.meta_properties + def __eq__(self, other: Any) -> bool: + if isinstance(other, GraphNode): + return ( + self.id == other.id + and self.properties == other.properties + and self.meta_properties == other.meta_properties + ) + else: + raise NotImplementedError + class GraphEdge: subject_id: IRI @@ -60,6 +70,16 @@ def add_object_id(self, object_id: IRI) -> None: def get_objects(self) -> Set[IRI]: return self.objects + def __eq__(self, other: Any) -> bool: + if isinstance(other, GraphEdge): + return ( + self.subject_id == other.subject_id + and self.predicate == other.predicate + and self.objects == other.objects + ) + else: + raise NotImplementedError + class InMemoryGraph(Graph): nodes: Dict[IRI, GraphNode] @@ -134,3 +154,9 @@ def get_node_relations(self, node_id: IRI) -> Dict[IRI, RelationSet]: } else: return {} + + def __eq__(self, other: Any) -> bool: + if isinstance(other, InMemoryGraph): + return self.nodes == other.nodes and self.edges == other.edges + else: + raise NotImplementedError diff --git a/app/kg/test_inmemory_graph.py b/app/kg/test_inmemory_graph.py index 3e96813..662f4b7 100644 --- a/app/kg/test_inmemory_graph.py +++ b/app/kg/test_inmemory_graph.py @@ -75,3 +75,34 @@ def test_edge(self): graph.get_node_relations(IRI("", "id2")), {IRI("", "rel2"): {IRI("", "id3"), IRI("", "id4")}}, ) + + def test_equality(self): + graph1 = InMemoryGraph() + graph2 = InMemoryGraph() + self.assertEqual(graph1, graph2) + + graph1.add_property( + IRI("", "id1"), IRI("", "rel1"), Literal("1", Literal.TYPE_INT) + ) + self.assertNotEqual(graph1, graph2) + + graph2.add_property( + IRI("", "id1"), IRI("", "rel1"), Literal("1", Literal.TYPE_INT) + ) + self.assertEqual(graph1, graph2) + + graph1.add_relation(IRI("", "id1"), IRI("", "rel2"), IRI("", "id2")) + self.assertNotEqual(graph1, graph2) + + graph2.add_relation(IRI("", "id1"), IRI("", "rel2"), IRI("", "id2")) + self.assertEqual(graph1, graph2) + + graph1.add_meta_property( + IRI("", "id1"), IRI("", "rel3"), Literal("1", Literal.TYPE_INT) + ) + self.assertNotEqual(graph1, graph2) + + graph2.add_meta_property( + IRI("", 
"id1"), IRI("", "rel3"), Literal("1", Literal.TYPE_INT) + ) + self.assertEqual(graph1, graph2) diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..bda6f0a --- /dev/null +++ b/app/main.py @@ -0,0 +1,26 @@ +import argparse +import logging +import signal + + +def main() -> None: + parser = argparse.ArgumentParser(description="Kubernetes watcher service") + parser.add_argument( + "--incluster", + dest="incluster", + action="store_true", + help="Load a Kubernetes config from within a cluster", + ) + args = parser.parse_args() + print(args) + + signal.signal(signal.SIGINT, signal.SIG_DFL) + + logger = logging.getLogger() + logger.setLevel(logging.INFO) + console_handler = logging.StreamHandler() + logger.addHandler(console_handler) + + +if __name__ == "__main__": + main() diff --git a/app/serialize/graph_serializer.py b/app/serialize/graph_serializer.py index 1cd919a..3903821 100644 --- a/app/serialize/graph_serializer.py +++ b/app/serialize/graph_serializer.py @@ -1,5 +1,5 @@ from abc import abstractmethod -from io import IOBase +from io import IOBase, StringIO from app.kg.graph import Graph @@ -8,3 +8,8 @@ class GraphSerializer: @abstractmethod def write(self, out: IOBase, graph: Graph) -> None: pass + + def to_string(self, graph: Graph) -> str: + buffer = StringIO() + self.write(buffer, graph) + return buffer.getvalue() diff --git a/app/test_context.py b/app/test_context.py new file mode 100644 index 0000000..8e8965a --- /dev/null +++ b/app/test_context.py @@ -0,0 +1,22 @@ +from unittest import TestCase + +from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient +from app.clients.k8s.mock_k8s_client import MockK8SClient +from app.clients.metadata_service.mock_metadata_service_client import ( + MockMetadataServiceClient, +) +from app.context import KGExporterContext +from app.serialize.jsonld_configuration import JsonLDConfiguration + + +class KGExporterContextTest(TestCase): + def test_start(self): + metadata_client = MockMetadataServiceClient() + k8s_client = MockK8SClient() + influxdb_client = MockInfluxDBClient() + jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) + context = KGExporterContext( + metadata_client, k8s_client, influxdb_client, jsonld_config + ) + context.start() + context.stop() diff --git a/pyproject.toml b/pyproject.toml index 8cfff7e..fea7890 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ pydantic-settings = "^2.2.1" pytest-vcr = "^1.0.2" influxdb-client = "^1.43.0" aiocsv = "1.3.2" +loguru = "0.7.2" [tool.poetry.group.dev.dependencies] black = "^23.12" From 5a10d5a4d5528865d445f806dad3ebf540f7a90b Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 4 Jun 2024 18:02:40 +0200 Subject: [PATCH 15/61] HHT-669: core processing fixes --- poetry.lock | 526 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 314 insertions(+), 212 deletions(-) diff --git a/poetry.lock b/poetry.lock index 38e910e..8a66766 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,25 +1,58 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+[[package]] +name = "aiocsv" +version = "1.3.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiocsv-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1996ac960c196aecc7d22e701c273a2676d13bf25575af78d4e515fc724ef20"}, + {file = "aiocsv-1.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd688dbc1723f2b3a433e42041ceb9c9a8fe70f547d35b2da4ea31e4c78efc5"}, + {file = "aiocsv-1.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2f921828e386bb6945ed7d268e1524349ea506974ae35b9772542714f0ef3efd"}, + {file = "aiocsv-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:198c905ec29897c347bf9b18eb410af13d7ac94a03d4b673e64eaa5f4557c913"}, + {file = "aiocsv-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c25ad8afbf79d28ec3320e608c7f38d3eff93e96ebbbd2430ae8fa0f6e7631b"}, + {file = "aiocsv-1.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4004569bff39cb839a335b8f673a6496fd5b0b6e074c7adb7aee4a0c8379ea22"}, + {file = "aiocsv-1.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e9c98f8d760add0b52274523baa4b81dde4a3c96f79222d3d4d6965bac9cdcbd"}, + {file = "aiocsv-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:9edb342b0d7dba94d8976f46ba5814b8d8704d67a45e1b8a6579ab0ba04309e7"}, + {file = "aiocsv-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:db943a463cb6828ba81bd7c083c6dd4c96edac4880b8638af81798d694405e26"}, + {file = "aiocsv-1.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10780033a1ed3da825f2256449d177b7106b3c5a2d64bd683eab37f1fdee1e36"}, + {file = "aiocsv-1.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8c7aee34ceff4eaa654f01acbdba648297f5f9532dc7a23fac62defec28e0fe5"}, + {file = "aiocsv-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:59b0ea2d9e73539d4c1276467c4457acafa995717ea1b5340f3737f2cde2f71a"}, + {file = "aiocsv-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1c7d1700b8de16f25b24bfcebfc2b0817b29ce413f6961f08d5aa95bf00a6862"}, + {file = "aiocsv-1.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aa9629c8a1c07e9d02c7d80d84f021f7994fe30d021f13ac963e251b54724ef"}, + {file = "aiocsv-1.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d125286f971e0038e8872f31b6f1cd6184b9c508445e6633f075d8b543b444bc"}, + {file = "aiocsv-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:b7220b4a6545abbbb6ab8fe7d4880aa8334f156b872b83641b898df2da9a6484"}, + {file = "aiocsv-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfd2ef214b6d7944991f62ac593ad45bdaf0ed9f5741c8441ee7de148e512fe7"}, + {file = "aiocsv-1.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c3e5a817b3489283cc1fd80f8ba56431d552dc9ea4e539c0069d8d56bf0fba7"}, + {file = "aiocsv-1.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2ef14fa0839394ecc52274ea538b12b7b2e756eb0f514902a8fb391612161079"}, + {file = "aiocsv-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:17341fa3b90414adda6cd8c79efc3c1a3f58a4dc72c2053c4532e82b61ef9f5e"}, + {file = "aiocsv-1.3.2.tar.gz", hash = "sha256:806d93465c7808d58d3ff0d2bba270fb4d04b934be6a1e95d0834c50a510910e"}, +] + +[package.dependencies] +typing_extensions = "*" + [[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types 
to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -88,13 +121,13 @@ files = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -245,13 +278,13 @@ files = [ [[package]] name = "filelock" -version = "3.13.3" +version = "3.14.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, - {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, ] [package.extras] @@ -277,13 +310,13 @@ pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "google-auth" -version = "2.28.1" +version = "2.29.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.28.1.tar.gz", hash = "sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885"}, - {file = "google_auth-2.28.1-py2.py3-none-any.whl", hash = "sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72"}, + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, ] [package.dependencies] @@ -356,13 +389,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.35" +version = "2.5.36" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, ] [package.extras] @@ -370,15 +403,39 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "influxdb-client" +version = "1.43.0" +description = "InfluxDB 2.0 Python client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "influxdb_client-1.43.0-py3-none-any.whl", hash = "sha256:f079e63018f521024118bc0141b6403c65506711e2e6e93500f8e69f1675dc38"}, + {file = "influxdb_client-1.43.0.tar.gz", hash = "sha256:ae2614d891baed52c0ae8f6194a04ee5b1c6422f6061318a3639fe63b7671b25"}, ] +[package.dependencies] +certifi = ">=14.05.14" +python-dateutil = ">=2.5.3" +reactivex = ">=4.0.4" +setuptools = ">=21.0.0" +urllib3 = ">=1.26.0" + +[package.extras] +async = ["aiocsv (>=1.2.2)", "aiohttp (>=3.8.1)"] +ciso = ["ciso8601 (>=2.1.1)"] +extra = ["numpy", "pandas (>=1.0.0)"] +test = ["aioresponses (>=0.7.3)", "coverage 
(>=4.0.3)", "flake8 (>=5.0.3)", "httpretty (==1.0.5)", "jinja2 (>=3.1.4)", "nose (>=1.3.7)", "pluggy (>=0.3.1)", "psutil (>=5.6.3)", "py (>=1.4.31)", "pytest (>=5.0.0)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "randomize (>=0.13)", "sphinx (==1.8.5)", "sphinx-rtd-theme"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -457,6 +514,24 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "loguru" +version = "0.7.2" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] + [[package]] name = "mccabe" version = "0.7.0" @@ -569,38 +644,38 @@ files = [ [[package]] name = "mypy" -version = "1.8.0" +version = "1.10.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = 
"mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, ] [package.dependencies] @@ -626,18 +701,15 @@ files = [ [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.0" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, + {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "oauthlib" version = "3.2.2" @@ -656,13 +728,13 @@ signedtoken = ["cryptography 
(>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -678,28 +750,29 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -719,13 +792,13 @@ files = [ [[package]] name = "pre-commit" -version = "3.7.0" +version = "3.7.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, - {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, ] [package.dependencies] @@ -737,28 +810,28 @@ virtualenv = ">=20.10.0" [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" @@ -773,18 +846,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.4" +version = "2.7.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, + {file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"}, + {file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.18.4" typing-extensions = ">=4.6.1" [package.extras] @@ -792,90 +865,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.18.4" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = 
"pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = 
"pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = 
"pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + 
{file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, + {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, ] [package.dependencies] @@ -883,17 +956,17 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.2.1" +version = "2.3.0" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"}, - {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"}, + {file = "pydantic_settings-2.3.0-py3-none-any.whl", hash = "sha256:26eeed27370a9c5e3f64e4a7d6602573cbedf05ed940f1d5b11c3f178427af7a"}, + {file = "pydantic_settings-2.3.0.tar.gz", hash = "sha256:78db28855a71503cfe47f39500a1dece523c640afd5280edb5c5c9c9cfa534c9"}, ] [package.dependencies] -pydantic = ">=2.3.0" +pydantic = ">=2.7.0" python-dotenv = ">=0.21.0" [package.extras] @@ -933,17 +1006,17 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -1051,15 +1124,29 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "reactivex" +version = "4.0.4" +description = "ReactiveX (Rx) for Python" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "reactivex-4.0.4-py3-none-any.whl", hash = "sha256:0004796c420bd9e68aad8e65627d85a8e13f293de76656165dffbcb3a0e3fb6a"}, + {file = "reactivex-4.0.4.tar.gz", hash = "sha256:e912e6591022ab9176df8348a653fe8c8fa7a301f26f9931c9d8c78a650e04e8"}, +] + +[package.dependencies] +typing-extensions = ">=4.1.1,<5.0.0" + [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1074,13 +1161,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" -version = "1.3.1" +version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.4" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, ] [package.dependencies] @@ -1106,19 +1193,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "69.2.0" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", 
"sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1144,68 +1230,70 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.12" +version = "6.0.12.20240311" description = "Typing stubs for PyYAML" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, ] [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.1" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, + {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "vcrpy" -version = "5.1.0" +version = "6.0.1" description = "Automatically mock your HTTP interactions to simplify and speed up testing" optional = false python-versions = ">=3.8" files = [ - {file = "vcrpy-5.1.0-py2.py3-none-any.whl", hash = "sha256:605e7b7a63dcd940db1df3ab2697ca7faf0e835c0852882142bafb19649d599e"}, - {file = "vcrpy-5.1.0.tar.gz", hash = "sha256:bbf1532f2618a04f11bce2a99af3a9647a32c880957293ff91e0a5f187b6b3d2"}, + {file = "vcrpy-6.0.1.tar.gz", hash = "sha256:9e023fee7f892baa0bbda2f7da7c8ac51165c1c6e38ff8688683a12a4bde9278"}, ] [package.dependencies] PyYAML = "*" +urllib3 = {version = "<2", markers = "platform_python_implementation == \"PyPy\""} wrapt = "*" yarl = "*" +[package.extras] +tests = ["Werkzeug (==2.0.3)", "aiohttp", "boto3", "httplib2", "httpx", "pytest", "pytest-aiohttp", "pytest-asyncio", "pytest-cov", "pytest-httpbin", "requests (>=2.22.0)", "tornado", "urllib3"] + [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, ] [package.dependencies] @@ -1214,25 +1302,39 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "websocket-client" -version = "1.7.0" +version = "1.8.0" description = 
"WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + [[package]] name = "wrapt" version = "1.16.0" @@ -1418,4 +1520,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "ff4d6a0e82ebae1b0054025ecc79a07e767af3fe88555913ea3f63141aa18936" +content-hash = "36c351d286a94645badbc697a917a1c5a6a3fddac2921010b9adb320e7ce2a43" From 726d1202a9989ae655b2a79ab57475df22c6aae6 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 4 Jun 2024 18:09:43 +0200 Subject: [PATCH 16/61] HHT-669: core processing fixes --- .github/workflows/test_and_build.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test_and_build.yaml b/.github/workflows/test_and_build.yaml index db1f481..e8a8b83 100644 --- a/.github/workflows/test_and_build.yaml +++ b/.github/workflows/test_and_build.yaml @@ -37,6 +37,7 @@ jobs: - name: Run style checks run: | + poetry run mypy --install-types poetry run mypy . poetry run isort . --check --diff poetry run flake8 . From 7b40c7c4edf66e88614a5991c1c09851275f830c Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 4 Jun 2024 18:13:30 +0200 Subject: [PATCH 17/61] HHT-669: core processing fixes --- .github/workflows/test_and_build.yaml | 1 - poetry.lock | 13 ++++++++++++- pyproject.toml | 1 + 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test_and_build.yaml b/.github/workflows/test_and_build.yaml index e8a8b83..db1f481 100644 --- a/.github/workflows/test_and_build.yaml +++ b/.github/workflows/test_and_build.yaml @@ -37,7 +37,6 @@ jobs: - name: Run style checks run: | - poetry run mypy --install-types poetry run mypy . poetry run isort . --check --diff poetry run flake8 . 
diff --git a/poetry.lock b/poetry.lock index 8a66766..4c9580e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1228,6 +1228,17 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240316" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, + {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, +] + [[package]] name = "types-pyyaml" version = "6.0.12.20240311" @@ -1520,4 +1531,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "36c351d286a94645badbc697a917a1c5a6a3fddac2921010b9adb320e7ce2a43" +content-hash = "a249fb8dea7868d0fb32d4e180d76b2de4c9e4b4d3efaa10fc4489c2d126197c" diff --git a/pyproject.toml b/pyproject.toml index fea7890..2ae5baf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ mypy-extensions = "^1.0" pyyaml = "^6.0.1" types-pyyaml = "^6.0.12.12" pre-commit = "^3.7.0" +types-python-dateutil = "^2.9.0" [tool.poetry.group.test.dependencies] pytest = "^7.4" From edaa4a319ef6c9c5b45a4b71c71cc2742f419a82 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 4 Jun 2024 18:15:52 +0200 Subject: [PATCH 18/61] HHT-669: core processing fixes --- poetry.lock | 216 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 216 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 4c9580e..d34868c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,6 +33,115 @@ files = [ [package.dependencies] typing_extensions = "*" +[[package]] +name = "aiohttp" +version = "3.9.3" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = 
"aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = 
"aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = 
"sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + [[package]] name = "annotated-types" version = "0.7.0" @@ -64,6 +173,25 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + [[package]] name = "black" version = "23.12.1" @@ -308,6 +436,92 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.2.0,<3.3.0" +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = 
"frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = 
"frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + [[package]] name = "google-auth" version = "2.29.0" @@ -1531,4 +1745,4 @@ 
multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "a249fb8dea7868d0fb32d4e180d76b2de4c9e4b4d3efaa10fc4489c2d126197c" +content-hash = "223cfe367e6de675663f1cb2e2a6d72fcddabf12f8690823e349791c55bfa19b" diff --git a/pyproject.toml b/pyproject.toml index 2ae5baf..a2a2c36 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ pydantic-settings = "^2.2.1" pytest-vcr = "^1.0.2" influxdb-client = "^1.43.0" aiocsv = "1.3.2" +aiohttp = "3.9.3" loguru = "0.7.2" [tool.poetry.group.dev.dependencies] From 9786e82f394f79c75ae9e191583b861679c5c457 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 4 Jun 2024 18:24:31 +0200 Subject: [PATCH 19/61] HHT-669: core processing --- app/clients/influxdb/influxdb_settings.py | 5 +- app/clients/k8s/k8s_client.py | 2 + app/clients/k8s/k8s_settings.py | 5 +- app/core/kg_builder.py | 45 +++++++++------ app/core/kg_slice_assembler.py | 60 +++++++++++++++++--- app/core/slice_for_node_strategy.py | 10 ++-- app/core/slice_strategy.py | 8 ++- app/core/test_kg_builder.py | 7 ++- app/core/test_kgslice_assembler.py | 21 +++++++ app/core/test_slice_for_node_strategy.py | 6 ++ app/core/types.py | 22 ++++---- app/k8s_transform/transformer_base.py | 4 ++ app/{context.py => kgexporter_context.py} | 15 +++++ app/kgexporter_context_builder.py | 65 ++++++++++++++++++++++ app/kgexporter_settings.py | 15 +++++ app/main.py | 24 +++----- app/metric_transform/__init__.py | 0 app/metric_transform/metric_transformer.py | 0 app/pydantic_yaml.py | 16 ++++++ app/test_context.py | 22 -------- app/test_kgexporter_context.py | 39 +++++++++++++ app/test_pydantic_yaml.py | 31 +++++++++++ pyproject.toml | 1 + 23 files changed, 333 insertions(+), 90 deletions(-) create mode 100644 app/core/test_kgslice_assembler.py create mode 100644 app/core/test_slice_for_node_strategy.py rename app/{context.py => kgexporter_context.py} (79%) create mode 100644 app/kgexporter_context_builder.py create mode 100644 app/kgexporter_settings.py create mode 100644 app/metric_transform/__init__.py create mode 100644 app/metric_transform/metric_transformer.py create mode 100644 app/pydantic_yaml.py delete mode 100644 app/test_context.py create mode 100644 app/test_kgexporter_context.py create mode 100644 app/test_pydantic_yaml.py diff --git a/app/clients/influxdb/influxdb_settings.py b/app/clients/influxdb/influxdb_settings.py index 065c072..2f82674 100644 --- a/app/clients/influxdb/influxdb_settings.py +++ b/app/clients/influxdb/influxdb_settings.py @@ -1,8 +1,7 @@ -from dataclasses import dataclass +from pydantic_settings import BaseSettings -@dataclass -class InfluxDBSettings: +class InfluxDBSettings(BaseSettings): url: str token: str org: str diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py index b83834e..5275103 100644 --- a/app/clients/k8s/k8s_client.py +++ b/app/clients/k8s/k8s_client.py @@ -6,6 +6,7 @@ @dataclass class ResourceSnapshot: + cluster: Dict[str, Any] = field(default_factory=dict) pods: List[Dict[str, Any]] = field(default_factory=list) nodes: List[Dict[str, Any]] = field(default_factory=list) deployments: List[Dict[str, Any]] = field(default_factory=list) @@ -36,6 +37,7 @@ async def fetch_snapshot(self) -> ResourceSnapshot: replicasets, ) = result return ResourceSnapshot( + cluster=dict(), pods=pods, nodes=nodes, deployments=deployments, diff --git a/app/clients/k8s/k8s_settings.py b/app/clients/k8s/k8s_settings.py index 23110d6..865a809 100644 --- a/app/clients/k8s/k8s_settings.py +++ b/app/clients/k8s/k8s_settings.py 
@@ -1,6 +1,5 @@ -from dataclasses import dataclass +from pydantic_settings import BaseSettings -@dataclass -class K8SSettings: +class K8SSettings(BaseSettings): in_cluster: bool diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 21c7ac5..b083a36 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -1,15 +1,29 @@ +from typing import List + import asyncio from loguru import logger +from pydantic_settings import BaseSettings from app.clients.k8s.k8s_client import K8SClient from app.core.async_queue import AsyncQueue -from app.core.influxdb_repository import InfluxDBRepository +from app.core.influxdb_repository import InfluxDBRepository, MetricQuery from app.core.kg_repository import KGRepository from app.core.kg_slice_assembler import KGSliceAssembler from app.core.slice_for_node_strategy import SliceForNodeStrategy from app.core.slice_strategy import SliceStrategy -from app.core.types import DKGSlice, QueryOptions +from app.core.types import DKGSlice, MetricSnapshot + + +class QuerySettings(BaseSettings): + pod_queries: List[MetricQuery] = [] + node_queries: List[MetricQuery] = [] + workload_queries: List[MetricQuery] = [] + + +class KGBuilderSettings(BaseSettings): + builder_tick_seconds: int + influxdb_queries: QuerySettings class KGBuilder: @@ -18,7 +32,7 @@ class KGBuilder: queue: AsyncQueue[DKGSlice] kg_repository: KGRepository influxdb_repository: InfluxDBRepository - query_options: QueryOptions + settings: KGBuilderSettings slice_strategy: SliceStrategy slice_assembler: KGSliceAssembler @@ -29,15 +43,14 @@ def __init__( k8s_client: K8SClient, kg_repository: KGRepository, influxdb_repository: InfluxDBRepository, + settings: KGBuilderSettings, ): self.running = running self.k8s_client = k8s_client self.queue = queue self.kg_repository = kg_repository self.influxdb_repository = influxdb_repository - self.query_options = QueryOptions( - pod_queries=[], node_queries=[], workload_queries=[] - ) + self.settings = settings self.slice_strategy = SliceForNodeStrategy() self.slice_assembler = KGSliceAssembler() @@ -52,29 +65,27 @@ async def run(self) -> None: ) = await asyncio.gather( self.k8s_client.fetch_snapshot(), self.influxdb_repository.query_many( - now, self.query_options.pod_queries + now, self.settings.influxdb_queries.pod_queries ), self.influxdb_repository.query_many( - now, self.query_options.node_queries + now, self.settings.influxdb_queries.node_queries ), self.influxdb_repository.query_many( - now, self.query_options.workload_queries + now, self.settings.influxdb_queries.workload_queries ), ) + metric_snapshot = MetricSnapshot( + pod_metrics, node_metrics, workload_metrics + ) logger.debug(cluster_snapshot) logger.debug(pod_metrics) logger.debug(node_metrics) logger.debug(workload_metrics) - slice_ids = self.slice_strategy.get_slices(cluster_snapshot) - for slice_id in slice_ids: + slices = self.slice_strategy.get_slices(cluster_snapshot, metric_snapshot) + for slice_id, slice_inputs in slices.items(): slice = self.slice_assembler.assemble( - now=now, - slice_id=slice_id, - cluster_snapshot=cluster_snapshot, - pod_metrics=pod_metrics, - node_metrics=node_metrics, - workload_metrics=workload_metrics, + now=now, slice_id=slice_id, inputs=slice_inputs ) self.queue.put_nowait(slice) await asyncio.sleep(30) diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py index d889e34..1ab4e8c 100644 --- a/app/core/kg_slice_assembler.py +++ b/app/core/kg_slice_assembler.py @@ -1,8 +1,13 @@ -from typing import List +from typing import Any, 
Dict, List, Type -from app.clients.influxdb.metric_value import MetricValue from app.clients.k8s.k8s_client import ResourceSnapshot -from app.core.types import DKGSlice, KGSliceId +from app.core.types import DKGSlice, KGSliceId, MetricSnapshot, SliceInputs +from app.k8s_transform.node_transformer import NodesToRDFTransformer +from app.k8s_transform.pod_transformer import PodToRDFTransformer +from app.k8s_transform.transformation_context import TransformationContext +from app.k8s_transform.transformer_base import TransformerBase +from app.k8s_transform.workload_transformer import WorkloadToRDFTransformer +from app.kg.graph import Graph from app.kg.inmemory_graph import InMemoryGraph @@ -11,11 +16,48 @@ def assemble( self, now: int, slice_id: KGSliceId, - cluster_snapshot: ResourceSnapshot, - pod_metrics: List[MetricValue], - node_metrics: List[MetricValue], - workload_metrics: List[MetricValue], + inputs: SliceInputs, ) -> DKGSlice: - graph = InMemoryGraph() - slice = DKGSlice(slice_id, graph, now) + sink = InMemoryGraph() + + self.transform_resources(now, inputs.resource, sink) + self.transform_metrics(now, inputs.metrics, sink) + + slice = DKGSlice(slice_id, sink, now) return slice + + def transform_resources( + self, now: int, snapshot: ResourceSnapshot, sink: Graph + ) -> None: + context = TransformationContext(now) + self.transform_resource(sink, snapshot.nodes, context, NodesToRDFTransformer) + self.transform_resource(sink, snapshot.pods, context, PodToRDFTransformer) + self.transform_resource( + sink, snapshot.daemonsets, context, WorkloadToRDFTransformer + ) + self.transform_resource( + sink, snapshot.deployments, context, WorkloadToRDFTransformer + ) + self.transform_resource( + sink, snapshot.replicasets, context, WorkloadToRDFTransformer + ) + self.transform_resource(sink, snapshot.jobs, context, WorkloadToRDFTransformer) + self.transform_resource( + sink, snapshot.statefullsets, context, WorkloadToRDFTransformer + ) + + def transform_resource( + self, + sink: Graph, + nodes: List[Dict[str, Any]], + context: TransformationContext, + transformer_cls: Type[TransformerBase], + ) -> None: + for node in nodes: + transformer = transformer_cls(node, sink) + transformer.transform(context) + + def transform_metrics( + self, now: int, snapshot: MetricSnapshot, sink: Graph + ) -> None: + pass diff --git a/app/core/slice_for_node_strategy.py b/app/core/slice_for_node_strategy.py index d3432ea..ad64cf6 100644 --- a/app/core/slice_for_node_strategy.py +++ b/app/core/slice_for_node_strategy.py @@ -1,11 +1,13 @@ -from typing import List +from typing import Dict from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.slice_strategy import SliceStrategy -from app.core.types import KGSliceId +from app.core.types import KGSliceId, MetricSnapshot, SliceInputs class SliceForNodeStrategy(SliceStrategy): - def get_slices(self, snapshot: ResourceSnapshot) -> List[KGSliceId]: + def get_slices( + self, resources: ResourceSnapshot, metrics: MetricSnapshot + ) -> Dict[KGSliceId, SliceInputs]: slice_id = KGSliceId("127.0.0.1", 80) - return [slice_id] + return {slice_id: SliceInputs(resources, metrics)} diff --git a/app/core/slice_strategy.py b/app/core/slice_strategy.py index d64552a..27797ea 100644 --- a/app/core/slice_strategy.py +++ b/app/core/slice_strategy.py @@ -1,9 +1,11 @@ -from typing import List +from typing import Dict from app.clients.k8s.k8s_client import ResourceSnapshot -from app.core.types import KGSliceId +from app.core.types import KGSliceId, MetricSnapshot, SliceInputs class 
SliceStrategy: - def get_slices(self, snapshot: ResourceSnapshot) -> List[KGSliceId]: + def get_slices( + self, resources: ResourceSnapshot, metrics: MetricSnapshot + ) -> Dict[KGSliceId, SliceInputs]: raise NotImplementedError diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index fa2bb3e..0da506f 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -9,7 +9,7 @@ ) from app.core.async_queue import AsyncQueue from app.core.influxdb_repository import InfluxDBRepository -from app.core.kg_builder import KGBuilder +from app.core.kg_builder import KGBuilder, KGBuilderSettings, QuerySettings from app.core.kg_repository import KGRepository from app.core.test_graph_fixture import TestGraphFixture from app.core.types import DKGSlice, KGSliceId @@ -23,6 +23,7 @@ class KGBuilderTest(TestCase, TestGraphFixture): queue: AsyncQueue[DKGSlice] running_event: asyncio.Event runner: asyncio.Runner + settings: KGBuilderSettings def setUp(self) -> None: self.client = MockMetadataServiceClient() @@ -32,6 +33,9 @@ def setUp(self) -> None: self.running_event = asyncio.Event() self.running_event.set() self.runner = asyncio.Runner() + self.settings = KGBuilderSettings( + builder_tick_seconds=1, influxdb_queries=QuerySettings() + ) def test_build(self) -> None: builder = self.create_builder() @@ -61,6 +65,7 @@ def create_builder(self) -> KGBuilder: self.k8s_client, repository, influxdb_repository, + self.settings, ) async def run_builder(self, builder: KGBuilder) -> None: diff --git a/app/core/test_kgslice_assembler.py b/app/core/test_kgslice_assembler.py new file mode 100644 index 0000000..64da479 --- /dev/null +++ b/app/core/test_kgslice_assembler.py @@ -0,0 +1,21 @@ +from unittest import TestCase + +from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.kg_slice_assembler import KGSliceAssembler +from app.core.types import KGSliceId, MetricSnapshot, SliceInputs + + +class KGSliceAssemblerTest(TestCase): + def test_assemble_empty(self) -> None: + now = 1 + slice_id = KGSliceId("127.0.0.1", 80) + resource_snapshot = ResourceSnapshot() + metric_snapshot = MetricSnapshot() + inputs = SliceInputs(resource_snapshot, metric_snapshot) + assembler = KGSliceAssembler() + + assembler.assemble( + now, + slice_id, + inputs, + ) diff --git a/app/core/test_slice_for_node_strategy.py b/app/core/test_slice_for_node_strategy.py new file mode 100644 index 0000000..852cddf --- /dev/null +++ b/app/core/test_slice_for_node_strategy.py @@ -0,0 +1,6 @@ +from unittest import TestCase + + +class SliceForNodeStrategyTest(TestCase): + def test_slice(self) -> None: + pass diff --git a/app/core/types.py b/app/core/types.py index 0bda2bf..b8a8334 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -1,12 +1,13 @@ -from typing import List, Set +from typing import List from dataclasses import dataclass, field -from app.core.influxdb_repository import Metric, MetricQuery +from app.clients.influxdb.metric_value import MetricValue +from app.clients.k8s.k8s_client import ResourceSnapshot from app.kg.graph import Graph -@dataclass +@dataclass(frozen=True) class KGSliceId: node_ip: str port: int @@ -23,14 +24,13 @@ class DKGSlice: @dataclass -class MetricsSnapshot: - pod_metrics: Set[Metric] = field(default_factory=set) - node_metrics: Set[Metric] = field(default_factory=set) - deployment_metrics: Set[Metric] = field(default_factory=set) +class MetricSnapshot: + pod_metrics: List[MetricValue] = field(default_factory=list) + node_metrics: List[MetricValue] = field(default_factory=list) 
+ workload_metrics: List[MetricValue] = field(default_factory=list) @dataclass -class QueryOptions: - pod_queries: List[MetricQuery] = field(default_factory=list) - node_queries: List[MetricQuery] = field(default_factory=list) - workload_queries: List[MetricQuery] = field(default_factory=list) +class SliceInputs: + resource: ResourceSnapshot + metrics: MetricSnapshot diff --git a/app/k8s_transform/transformer_base.py b/app/k8s_transform/transformer_base.py index 6dd632a..d94ac08 100644 --- a/app/k8s_transform/transformer_base.py +++ b/app/k8s_transform/transformer_base.py @@ -4,6 +4,7 @@ from jsonpath_ng.ext import parse +from app.k8s_transform.transformation_context import TransformationContext from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI @@ -46,6 +47,9 @@ def __init__(self, source: Dict[str, Any], sink: Graph): self.source = source self.sink = sink + def transform(self, context: TransformationContext) -> None: + raise NotImplementedError + def get_reference_id(self, reference: Dict[str, Any]) -> Tuple[IRI, str]: name = reference.get("name") uid = reference.get("uid") diff --git a/app/context.py b/app/kgexporter_context.py similarity index 79% rename from app/context.py rename to app/kgexporter_context.py index 4244871..6d01b12 100644 --- a/app/context.py +++ b/app/kgexporter_context.py @@ -1,6 +1,9 @@ from typing import Any, List import asyncio +from wsgiref.simple_server import WSGIServer + +from prometheus_client import start_http_server from app.clients.influxdb.influxdb_client import InfluxDBClient from app.clients.k8s.k8s_client import K8SClient @@ -12,6 +15,7 @@ from app.core.kg_repository import KGRepository from app.core.kg_updater import KGUpdater from app.core.types import DKGSlice +from app.kgexporter_settings import KGExporterSettings from app.serialize.jsonld_configuration import JsonLDConfiguration @@ -22,7 +26,9 @@ class KGExporterContext: runner: asyncio.Runner dkg_slice_store: DKGSliceStore running: asyncio.Event + prometheus_server: WSGIServer tasks: List[asyncio.Task[Any]] + settings: KGExporterSettings def __init__( self, @@ -30,7 +36,9 @@ def __init__( k8s_client: K8SClient, influxdb_client: InfluxDBClient, jsonld_config: JsonLDConfiguration, + settings: KGExporterSettings, ): + self.settings = settings kg_repository = KGRepository(metadata_client, jsonld_config) influxdb_repository = InfluxDBRepository(influxdb_client) self.running = asyncio.Event() @@ -42,6 +50,7 @@ def __init__( k8s_client, kg_repository, influxdb_repository, + self.settings.builder, ) self.updater = KGUpdater(self.running, self.queue, kg_repository) self.runner = asyncio.Runner() @@ -52,6 +61,8 @@ def start(self) -> None: return self.running.set() self.runner.run(self.run_tasks()) + server, _ = start_http_server(8080) + self.prometheus_server = server async def run_tasks(self) -> None: self.tasks.append(asyncio.create_task(self.builder.run())) @@ -59,3 +70,7 @@ async def run_tasks(self) -> None: def stop(self) -> None: self.running.clear() + self.prometheus_server.shutdown() + + def wait_for_termination(self) -> None: + self.runner.run(self.running.wait()) diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py new file mode 100644 index 0000000..81532af --- /dev/null +++ b/app/kgexporter_context_builder.py @@ -0,0 +1,65 @@ +from typing import List + +import argparse + +from app.clients.influxdb.influxdb_client_impl import InfluxDBClientImpl +from app.clients.influxdb.influxdb_settings 
import InfluxDBSettings +from app.clients.k8s.k8s_client_impl import K8SClientImpl +from app.clients.k8s.k8s_settings import K8SSettings +from app.clients.metadata_service.metadata_service_client_impl import ( + MetadataServiceClientImpl, +) +from app.clients.metadata_service.metadata_service_settings import ( + MetadataServiceSettings, +) +from app.core.kg_builder import KGBuilderSettings, QuerySettings +from app.kgexporter_context import KGExporterContext +from app.kgexporter_settings import KGExporterSettings +from app.serialize.jsonld_configuration import JsonLDConfiguration + + +class KGExporterContextBuilder: + args: List[str] + + def __init__(self, args: List[str]): + self.args = args + + def build(self) -> KGExporterContext: + settings = self.get_settings() + metadata_client = MetadataServiceClientImpl(settings.metadata) + k8s_client = K8SClientImpl(settings.k8s) + influxdb_client = InfluxDBClientImpl(settings.influxdb) + jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) + + context = KGExporterContext( + metadata_client, k8s_client, influxdb_client, jsonld_config, settings + ) + return context + + def get_settings(self) -> KGExporterSettings: + return KGExporterSettings( + builder=KGBuilderSettings( + builder_tick_seconds=1, influxdb_queries=QuerySettings() + ), + k8s=K8SSettings(in_cluster=True), + influxdb=InfluxDBSettings( + url="test", token="token", org="org", timeout=60000 + ), + metadata=MetadataServiceSettings(), + ) + + def parse(self): + parser = argparse.ArgumentParser(description="Kubernetes watcher service") + parser.add_argument( + "--config", + dest="config", + action="store", + help="Configuration of the KGExporter", + ) + args = parser.parse_args() + print(args) + + # logger = logging.getLogger() + # logger.setLevel(logging.INFO) + # console_handler = logging.StreamHandler() + # logger.addHandler(console_handler) diff --git a/app/kgexporter_settings.py b/app/kgexporter_settings.py new file mode 100644 index 0000000..fd70ddf --- /dev/null +++ b/app/kgexporter_settings.py @@ -0,0 +1,15 @@ +from pydantic_settings import BaseSettings + +from app.clients.influxdb.influxdb_settings import InfluxDBSettings +from app.clients.k8s.k8s_settings import K8SSettings +from app.clients.metadata_service.metadata_service_settings import ( + MetadataServiceSettings, +) +from app.core.kg_builder import KGBuilderSettings + + +class KGExporterSettings(BaseSettings): + builder: KGBuilderSettings + k8s: K8SSettings + influxdb: InfluxDBSettings + metadata: MetadataServiceSettings diff --git a/app/main.py b/app/main.py index bda6f0a..de6fec1 100644 --- a/app/main.py +++ b/app/main.py @@ -1,25 +1,15 @@ -import argparse -import logging import signal +import sys +from app.kgexporter_context_builder import KGExporterContextBuilder -def main() -> None: - parser = argparse.ArgumentParser(description="Kubernetes watcher service") - parser.add_argument( - "--incluster", - dest="incluster", - action="store_true", - help="Load a Kubernetes config from within a cluster", - ) - args = parser.parse_args() - print(args) +def main() -> None: signal.signal(signal.SIGINT, signal.SIG_DFL) - - logger = logging.getLogger() - logger.setLevel(logging.INFO) - console_handler = logging.StreamHandler() - logger.addHandler(console_handler) + builder = KGExporterContextBuilder(sys.argv[1:]) + context = builder.build() + context.start() + context.wait_for_termination() if __name__ == "__main__": diff --git a/app/metric_transform/__init__.py b/app/metric_transform/__init__.py new file mode 100644 
index 0000000..e69de29 diff --git a/app/metric_transform/metric_transformer.py b/app/metric_transform/metric_transformer.py new file mode 100644 index 0000000..e69de29 diff --git a/app/pydantic_yaml.py b/app/pydantic_yaml.py new file mode 100644 index 0000000..a01c68c --- /dev/null +++ b/app/pydantic_yaml.py @@ -0,0 +1,16 @@ +from typing import Type + +from pydantic_settings import BaseSettings +from yaml import safe_dump, safe_load + + +def from_yaml(file_path: str, cls: Type[BaseSettings]) -> BaseSettings: + with open(file_path, "r") as file: + file_struct = safe_load(file) + return cls.model_validate(file_struct) + + +def to_yaml(file_path: str, obj: BaseSettings) -> None: + obj_dict = obj.model_dump() + with open(file_path, "w") as file: + safe_dump(obj_dict, file) diff --git a/app/test_context.py b/app/test_context.py deleted file mode 100644 index 8e8965a..0000000 --- a/app/test_context.py +++ /dev/null @@ -1,22 +0,0 @@ -from unittest import TestCase - -from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient -from app.clients.k8s.mock_k8s_client import MockK8SClient -from app.clients.metadata_service.mock_metadata_service_client import ( - MockMetadataServiceClient, -) -from app.context import KGExporterContext -from app.serialize.jsonld_configuration import JsonLDConfiguration - - -class KGExporterContextTest(TestCase): - def test_start(self): - metadata_client = MockMetadataServiceClient() - k8s_client = MockK8SClient() - influxdb_client = MockInfluxDBClient() - jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) - context = KGExporterContext( - metadata_client, k8s_client, influxdb_client, jsonld_config - ) - context.start() - context.stop() diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py new file mode 100644 index 0000000..3363186 --- /dev/null +++ b/app/test_kgexporter_context.py @@ -0,0 +1,39 @@ +from unittest import TestCase + +from app.clients.influxdb.influxdb_settings import InfluxDBSettings +from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient +from app.clients.k8s.k8s_settings import K8SSettings +from app.clients.k8s.mock_k8s_client import MockK8SClient +from app.clients.metadata_service.metadata_service_settings import ( + MetadataServiceSettings, +) +from app.clients.metadata_service.mock_metadata_service_client import ( + MockMetadataServiceClient, +) +from app.core.kg_builder import KGBuilderSettings, QuerySettings +from app.kgexporter_context import KGExporterContext +from app.kgexporter_settings import KGExporterSettings +from app.serialize.jsonld_configuration import JsonLDConfiguration + + +class KGExporterContextTest(TestCase): + def test_start(self): + metadata_client = MockMetadataServiceClient() + k8s_client = MockK8SClient() + influxdb_client = MockInfluxDBClient() + jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) + settings = KGExporterSettings( + builder=KGBuilderSettings( + builder_tick_seconds=1, influxdb_queries=QuerySettings() + ), + k8s=K8SSettings(in_cluster=True), + influxdb=InfluxDBSettings( + url="test", token="token", org="org", timeout=60000 + ), + metadata=MetadataServiceSettings(), + ) + context = KGExporterContext( + metadata_client, k8s_client, influxdb_client, jsonld_config, settings + ) + context.start() + context.stop() diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py new file mode 100644 index 0000000..054a9ee --- /dev/null +++ b/app/test_pydantic_yaml.py @@ -0,0 +1,31 @@ +from tempfile import TemporaryDirectory +from 
unittest import TestCase + +from app.clients.influxdb.influxdb_settings import InfluxDBSettings +from app.clients.k8s.k8s_settings import K8SSettings +from app.clients.metadata_service.metadata_service_settings import ( + MetadataServiceSettings, +) +from app.core.kg_builder import KGBuilderSettings, QuerySettings +from app.kgexporter_settings import KGExporterSettings +from app.pydantic_yaml import from_yaml, to_yaml + + +class PyDanticYamlTest(TestCase): + def test_dump_load_settings(self): + expected = KGExporterSettings( + builder=KGBuilderSettings( + builder_tick_seconds=1, influxdb_queries=QuerySettings() + ), + k8s=K8SSettings(in_cluster=True), + influxdb=InfluxDBSettings( + url="test", token="token", org="org", timeout=60000 + ), + metadata=MetadataServiceSettings(), + ) + with TemporaryDirectory("-pydantic", "test") as tmpdir: + yaml_file = f"{tmpdir}/test.yaml" + to_yaml(yaml_file, expected) + actual = from_yaml(yaml_file, KGExporterSettings) + + self.assertEqual(expected, actual) diff --git a/pyproject.toml b/pyproject.toml index a2a2c36..dc028f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ influxdb-client = "^1.43.0" aiocsv = "1.3.2" aiohttp = "3.9.3" loguru = "0.7.2" +prometheus-client = "0.20.0" [tool.poetry.group.dev.dependencies] black = "^23.12" From 396deea1f5e88ae0b891c4b4ec172cbfd09e422b Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Wed, 5 Jun 2024 15:23:58 +0200 Subject: [PATCH 20/61] HHT-669: influxdb processing refinement --- app/clients/influxdb/metric_value.py | 2 ++ app/clients/influxdb/simple_result_parser.py | 6 +++-- .../influxdb/test_simple_result_parser.py | 10 +++++++-- app/core/test_influxdb_repository.py | 8 +++---- poetry.lock | 22 +++++++++++++++---- 5 files changed, 36 insertions(+), 12 deletions(-) diff --git a/app/clients/influxdb/metric_value.py b/app/clients/influxdb/metric_value.py index bc27751..0a98088 100644 --- a/app/clients/influxdb/metric_value.py +++ b/app/clients/influxdb/metric_value.py @@ -3,10 +3,12 @@ from dataclasses import dataclass MetricId: TypeAlias = str +ResourceId: TypeAlias = str @dataclass class MetricValue: metric_id: MetricId + resource_id: ResourceId timestamp: int value: float diff --git a/app/clients/influxdb/simple_result_parser.py b/app/clients/influxdb/simple_result_parser.py index 6b5e6b7..5187f6e 100644 --- a/app/clients/influxdb/simple_result_parser.py +++ b/app/clients/influxdb/simple_result_parser.py @@ -5,13 +5,15 @@ class SimpleResultParser(QueryResultParser): - IDENTIFIER_FIELD: str = "identifier" + METRICID_FIELD: str = "metric_id" + RESOURCEID_FIELD: str = "resource_id" TIMESTAMP_FIELD: str = "timestamp" VALUE_FIELD: str = "value" def parse(self, row: Dict[str, Any]) -> MetricValue: return MetricValue( - row[self.IDENTIFIER_FIELD], + row[self.METRICID_FIELD], + row[self.RESOURCEID_FIELD], self.get_timestamp(row[self.TIMESTAMP_FIELD]), self.get_float(row[self.VALUE_FIELD]), ) diff --git a/app/clients/influxdb/test_simple_result_parser.py b/app/clients/influxdb/test_simple_result_parser.py index 718919b..5a5212d 100644 --- a/app/clients/influxdb/test_simple_result_parser.py +++ b/app/clients/influxdb/test_simple_result_parser.py @@ -31,12 +31,18 @@ def test_parse(self) -> None: "instance": "10.14.1.160:9100", "job": "kubernetes-service-endpoints", "namespace": "monitoring", - "identifier": "glaciation-testm1w5-master01", + "resource_id": "glaciation-testm1w5-master01", + "metric_id": "RAM.Capacity", "service": "monitoring-stack-prometheus-node-exporter", } actual = parser.parse(row) 
self.assertEqual( - MetricValue("glaciation-testm1w5-master01", 1717142400000, 26237685760.0), + MetricValue( + "RAM.Capacity", + "glaciation-testm1w5-master01", + 1717142400000, + 26237685760.0, + ), actual, ) diff --git a/app/core/test_influxdb_repository.py b/app/core/test_influxdb_repository.py index 7ccfdd9..7f0806a 100644 --- a/app/core/test_influxdb_repository.py +++ b/app/core/test_influxdb_repository.py @@ -15,7 +15,7 @@ def setUp(self) -> None: self.repository = InfluxDBRepository(self.client) def test_query_one(self) -> None: - expected = MetricValue("id", 100500, 42.0) + expected = MetricValue("id", "resource", 100500, 42.0) self.client.mock_query("test_query", [expected]) now = 1 query = MetricQuery( @@ -29,9 +29,9 @@ def test_query_one(self) -> None: self.assertEqual([expected], actual) def test_query_many(self) -> None: - expected1 = MetricValue("id1", 100500, 41.0) - expected2 = MetricValue("id2", 100500, 42.0) - expected3 = MetricValue("id3", 100500, 43.0) + expected1 = MetricValue("id1", "pod1", 100500, 41.0) + expected2 = MetricValue("id2", "node1", 100500, 42.0) + expected3 = MetricValue("id3", "deployment1", 100500, 43.0) self.client.mock_query("test_query1", [expected1]) self.client.mock_query("test_query2", [expected2]) self.client.mock_query("test_query3", [expected3]) diff --git a/poetry.lock b/poetry.lock index d34868c..8c76d4f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -915,13 +915,13 @@ files = [ [[package]] name = "nodeenv" -version = "1.9.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.9.0-py2.py3-none-any.whl", hash = "sha256:508ecec98f9f3330b636d4448c0f1a56fc68017c68f1e7857ebc52acf0eb879a"}, - {file = "nodeenv-1.9.0.tar.gz", hash = "sha256:07f144e90dae547bf0d4ee8da0ee42664a42a04e02ed68e06324348dafe4bdb1"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] [[package]] @@ -1022,6 +1022,20 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prometheus-client" +version = "0.20.0" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, +] + +[package.extras] +twisted = ["twisted"] + [[package]] name = "pyasn1" version = "0.6.0" @@ -1745,4 +1759,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "223cfe367e6de675663f1cb2e2a6d72fcddabf12f8690823e349791c55bfa19b" +content-hash = "0c86c4ff0e349b323c4d82ade4b911d92dd667a438674d074983ba706d5cc90d" From 1436f442a096ab7b79d0f7e0cdf8f395af573e13 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Mon, 10 Jun 2024 14:28:53 +0200 Subject: [PATCH 21/61] HHT-669: abstracting metric repository --- app/clients/influxdb/influxdb_client.py | 2 +- app/clients/influxdb/influxdb_client_impl.py | 2 +- app/clients/influxdb/mock_infuxdbclient.py | 2 +- app/clients/influxdb/query_result_parser.py | 2 +- app/clients/influxdb/simple_result_parser.py | 2 +- app/clients/influxdb/test_simple_result_parser.py | 2 +- app/core/kg_builder.py | 14 +++++++------- ...influxdb_repository.py => metric_repository.py} | 4 ++-- app/{clients/influxdb => core}/metric_value.py | 0 app/core/test_kg_builder.py | 4 ++-- ...xdb_repository.py => test_metric_repository.py} | 10 +++++----- app/core/types.py | 2 +- app/kgexporter_context.py | 4 ++-- app/kgexporter_context_builder.py | 4 +--- app/test_kgexporter_context.py | 4 +--- app/test_pydantic_yaml.py | 4 +--- 16 files changed, 28 insertions(+), 34 deletions(-) rename app/core/{influxdb_repository.py => metric_repository.py} (94%) rename app/{clients/influxdb => core}/metric_value.py (100%) rename app/core/{test_influxdb_repository.py => test_metric_repository.py} (87%) diff --git a/app/clients/influxdb/influxdb_client.py b/app/clients/influxdb/influxdb_client.py index a6f90d3..bf11b09 100644 --- a/app/clients/influxdb/influxdb_client.py +++ b/app/clients/influxdb/influxdb_client.py @@ -1,7 +1,7 @@ from typing import List -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.query_result_parser import QueryResultParser +from app.core.metric_value import MetricValue class InfluxDBClient: diff --git a/app/clients/influxdb/influxdb_client_impl.py b/app/clients/influxdb/influxdb_client_impl.py index 8f2c329..d9a026f 100644 --- a/app/clients/influxdb/influxdb_client_impl.py +++ b/app/clients/influxdb/influxdb_client_impl.py @@ -5,8 +5,8 @@ from app.clients.influxdb.influxdb_client import InfluxDBClient from app.clients.influxdb.influxdb_settings import InfluxDBSettings -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.query_result_parser import QueryResultParser +from app.core.metric_value import MetricValue class InfluxDBClientImpl(InfluxDBClient): diff --git a/app/clients/influxdb/mock_infuxdbclient.py b/app/clients/influxdb/mock_infuxdbclient.py index d9a087e..26e5caa 100644 --- a/app/clients/influxdb/mock_infuxdbclient.py +++ b/app/clients/influxdb/mock_infuxdbclient.py @@ -1,8 +1,8 @@ from typing import Dict, List from app.clients.influxdb.influxdb_client import InfluxDBClient -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.query_result_parser import QueryResultParser +from app.core.metric_value import MetricValue class MockInfluxDBClient(InfluxDBClient): diff --git 
a/app/clients/influxdb/query_result_parser.py b/app/clients/influxdb/query_result_parser.py index e1d8615..6f38f7d 100644 --- a/app/clients/influxdb/query_result_parser.py +++ b/app/clients/influxdb/query_result_parser.py @@ -2,7 +2,7 @@ from datetime import datetime -from app.clients.influxdb.metric_value import MetricValue +from app.core.metric_value import MetricValue class QueryResultParser: diff --git a/app/clients/influxdb/simple_result_parser.py b/app/clients/influxdb/simple_result_parser.py index 5187f6e..c1727db 100644 --- a/app/clients/influxdb/simple_result_parser.py +++ b/app/clients/influxdb/simple_result_parser.py @@ -1,7 +1,7 @@ from typing import Any, Dict -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.query_result_parser import QueryResultParser +from app.core.metric_value import MetricValue class SimpleResultParser(QueryResultParser): diff --git a/app/clients/influxdb/test_simple_result_parser.py b/app/clients/influxdb/test_simple_result_parser.py index 5a5212d..759f612 100644 --- a/app/clients/influxdb/test_simple_result_parser.py +++ b/app/clients/influxdb/test_simple_result_parser.py @@ -3,8 +3,8 @@ from dateutil.tz import tzutc -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.simple_result_parser import SimpleResultParser +from app.core.metric_value import MetricValue class SimpleResultParserTest(TestCase): diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index b083a36..cd88eb2 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -7,9 +7,9 @@ from app.clients.k8s.k8s_client import K8SClient from app.core.async_queue import AsyncQueue -from app.core.influxdb_repository import InfluxDBRepository, MetricQuery from app.core.kg_repository import KGRepository from app.core.kg_slice_assembler import KGSliceAssembler +from app.core.metric_repository import MetricQuery, MetricRepository from app.core.slice_for_node_strategy import SliceForNodeStrategy from app.core.slice_strategy import SliceStrategy from app.core.types import DKGSlice, MetricSnapshot @@ -23,7 +23,7 @@ class QuerySettings(BaseSettings): class KGBuilderSettings(BaseSettings): builder_tick_seconds: int - influxdb_queries: QuerySettings + queries: QuerySettings class KGBuilder: @@ -31,7 +31,7 @@ class KGBuilder: k8s_client: K8SClient queue: AsyncQueue[DKGSlice] kg_repository: KGRepository - influxdb_repository: InfluxDBRepository + influxdb_repository: MetricRepository settings: KGBuilderSettings slice_strategy: SliceStrategy slice_assembler: KGSliceAssembler @@ -42,7 +42,7 @@ def __init__( queue: AsyncQueue[DKGSlice], k8s_client: K8SClient, kg_repository: KGRepository, - influxdb_repository: InfluxDBRepository, + influxdb_repository: MetricRepository, settings: KGBuilderSettings, ): self.running = running @@ -65,13 +65,13 @@ async def run(self) -> None: ) = await asyncio.gather( self.k8s_client.fetch_snapshot(), self.influxdb_repository.query_many( - now, self.settings.influxdb_queries.pod_queries + now, self.settings.queries.pod_queries ), self.influxdb_repository.query_many( - now, self.settings.influxdb_queries.node_queries + now, self.settings.queries.node_queries ), self.influxdb_repository.query_many( - now, self.settings.influxdb_queries.workload_queries + now, self.settings.queries.workload_queries ), ) metric_snapshot = MetricSnapshot( diff --git a/app/core/influxdb_repository.py b/app/core/metric_repository.py similarity index 94% rename from app/core/influxdb_repository.py rename to 
app/core/metric_repository.py index cca5959..d78806b 100644 --- a/app/core/influxdb_repository.py +++ b/app/core/metric_repository.py @@ -5,9 +5,9 @@ from enum import StrEnum from app.clients.influxdb.influxdb_client import InfluxDBClient -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.query_result_parser import QueryResultParser from app.clients.influxdb.simple_result_parser import SimpleResultParser +from app.core.metric_value import MetricValue class ResultParserId(StrEnum): @@ -39,7 +39,7 @@ class Metric: source: str -class InfluxDBRepository: +class MetricRepository: client: InfluxDBClient def __init__(self, client: InfluxDBClient): diff --git a/app/clients/influxdb/metric_value.py b/app/core/metric_value.py similarity index 100% rename from app/clients/influxdb/metric_value.py rename to app/core/metric_value.py diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 0da506f..3081a46 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -8,9 +8,9 @@ MockMetadataServiceClient, ) from app.core.async_queue import AsyncQueue -from app.core.influxdb_repository import InfluxDBRepository from app.core.kg_builder import KGBuilder, KGBuilderSettings, QuerySettings from app.core.kg_repository import KGRepository +from app.core.metric_repository import MetricRepository from app.core.test_graph_fixture import TestGraphFixture from app.core.types import DKGSlice, KGSliceId from app.kg.inmemory_graph import InMemoryGraph @@ -58,7 +58,7 @@ def wait_for_slice(self, seconds: int) -> DKGSlice: def create_builder(self) -> KGBuilder: repository = KGRepository(self.client, self.get_jsonld_config()) - influxdb_repository = InfluxDBRepository(self.influxdb_client) + influxdb_repository = MetricRepository(self.influxdb_client) return KGBuilder( self.running_event, self.queue, diff --git a/app/core/test_influxdb_repository.py b/app/core/test_metric_repository.py similarity index 87% rename from app/core/test_influxdb_repository.py rename to app/core/test_metric_repository.py index 7f0806a..2ded797 100644 --- a/app/core/test_influxdb_repository.py +++ b/app/core/test_metric_repository.py @@ -1,18 +1,18 @@ import asyncio from unittest import TestCase -from app.clients.influxdb.metric_value import MetricValue from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient -from app.core.influxdb_repository import InfluxDBRepository, MetricQuery, ResultParserId +from app.core.metric_repository import MetricQuery, MetricRepository, ResultParserId +from app.core.metric_value import MetricValue -class InfluxDBRepositoryTest(TestCase): +class MetricRepositoryTest(TestCase): client: MockInfluxDBClient - repository: InfluxDBRepository + repository: MetricRepository def setUp(self) -> None: self.client = MockInfluxDBClient() - self.repository = InfluxDBRepository(self.client) + self.repository = MetricRepository(self.client) def test_query_one(self) -> None: expected = MetricValue("id", "resource", 100500, 42.0) diff --git a/app/core/types.py b/app/core/types.py index b8a8334..993c0c2 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -2,8 +2,8 @@ from dataclasses import dataclass, field -from app.clients.influxdb.metric_value import MetricValue from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.metric_value import MetricValue from app.kg.graph import Graph diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 6d01b12..3855249 100644 --- a/app/kgexporter_context.py +++ 
b/app/kgexporter_context.py @@ -10,10 +10,10 @@ from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.async_queue import AsyncQueue from app.core.dkg_slice_store import DKGSliceStore -from app.core.influxdb_repository import InfluxDBRepository from app.core.kg_builder import KGBuilder from app.core.kg_repository import KGRepository from app.core.kg_updater import KGUpdater +from app.core.metric_repository import MetricRepository from app.core.types import DKGSlice from app.kgexporter_settings import KGExporterSettings from app.serialize.jsonld_configuration import JsonLDConfiguration @@ -40,7 +40,7 @@ def __init__( ): self.settings = settings kg_repository = KGRepository(metadata_client, jsonld_config) - influxdb_repository = InfluxDBRepository(influxdb_client) + influxdb_repository = MetricRepository(influxdb_client) self.running = asyncio.Event() self.queue = AsyncQueue[DKGSlice]() self.dkg_slice_store = DKGSliceStore() diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index 81532af..80b11d1 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -38,9 +38,7 @@ def build(self) -> KGExporterContext: def get_settings(self) -> KGExporterSettings: return KGExporterSettings( - builder=KGBuilderSettings( - builder_tick_seconds=1, influxdb_queries=QuerySettings() - ), + builder=KGBuilderSettings(builder_tick_seconds=1, queries=QuerySettings()), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( url="test", token="token", org="org", timeout=60000 diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 3363186..76283a3 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -23,9 +23,7 @@ def test_start(self): influxdb_client = MockInfluxDBClient() jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) settings = KGExporterSettings( - builder=KGBuilderSettings( - builder_tick_seconds=1, influxdb_queries=QuerySettings() - ), + builder=KGBuilderSettings(builder_tick_seconds=1, queries=QuerySettings()), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( url="test", token="token", org="org", timeout=60000 diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index 054a9ee..7071c20 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -14,9 +14,7 @@ class PyDanticYamlTest(TestCase): def test_dump_load_settings(self): expected = KGExporterSettings( - builder=KGBuilderSettings( - builder_tick_seconds=1, influxdb_queries=QuerySettings() - ), + builder=KGBuilderSettings(builder_tick_seconds=1, queries=QuerySettings()), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( url="test", token="token", org="org", timeout=60000 From 9234ed7b7596cbf2cab9c8100b7b0fa9435b5442 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 11 Jun 2024 13:18:07 +0200 Subject: [PATCH 22/61] HHT-669: Metric transformers --- app/core/kg_builder.py | 4 +- app/core/kg_slice_assembler.py | 13 +++- app/core/metric_repository.py | 20 ++--- app/core/test_kg_builder.py | 2 +- app/core/test_metric_repository.py | 28 +++++-- app/core/types.py | 23 +++++- app/metric_transform/__fixture__/node.jsonld | 76 +++++++++++++++++++ app/metric_transform/__fixture__/node.turtle | 32 ++++++++ app/metric_transform/__fixture__/pod1.jsonld | 75 ++++++++++++++++++ app/metric_transform/__fixture__/pod1.turtle | 29 +++++++ app/metric_transform/metric_transformer.py | 18 +++++ .../node_metric_transformer.py | 47 
++++++++++++ .../pod_metric_transformer.py | 42 ++++++++++ app/metric_transform/test_base.py | 48 ++++++++++++ .../test_node_metric_transformer.py | 74 ++++++++++++++++++ .../test_pod_metric_transformer.py | 74 ++++++++++++++++++ 16 files changed, 575 insertions(+), 30 deletions(-) create mode 100644 app/metric_transform/__fixture__/node.jsonld create mode 100644 app/metric_transform/__fixture__/node.turtle create mode 100644 app/metric_transform/__fixture__/pod1.jsonld create mode 100644 app/metric_transform/__fixture__/pod1.turtle create mode 100644 app/metric_transform/node_metric_transformer.py create mode 100644 app/metric_transform/pod_metric_transformer.py create mode 100644 app/metric_transform/test_base.py create mode 100644 app/metric_transform/test_node_metric_transformer.py create mode 100644 app/metric_transform/test_pod_metric_transformer.py diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index cd88eb2..70eb094 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -75,7 +75,9 @@ async def run(self) -> None: ), ) metric_snapshot = MetricSnapshot( - pod_metrics, node_metrics, workload_metrics + list(zip(self.settings.queries.pod_queries, pod_metrics)), + list(zip(self.settings.queries.node_queries, node_metrics)), + list(zip(self.settings.queries.workload_queries, workload_metrics)), ) logger.debug(cluster_snapshot) logger.debug(pod_metrics) diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py index 1ab4e8c..3a8caf4 100644 --- a/app/core/kg_slice_assembler.py +++ b/app/core/kg_slice_assembler.py @@ -9,6 +9,8 @@ from app.k8s_transform.workload_transformer import WorkloadToRDFTransformer from app.kg.graph import Graph from app.kg.inmemory_graph import InMemoryGraph +from app.metric_transform.node_metric_transformer import NodeMetricToGraphTransformer +from app.metric_transform.pod_metric_transformer import PodMetricToGraphTransformer class KGSliceAssembler: @@ -58,6 +60,13 @@ def transform_resource( transformer.transform(context) def transform_metrics( - self, now: int, snapshot: MetricSnapshot, sink: Graph + self, + now: int, + snapshot: MetricSnapshot, + sink: Graph, ) -> None: - pass + context = TransformationContext(now) + node_transformer = NodeMetricToGraphTransformer(snapshot.node_metrics, sink) + node_transformer.transform(context) + pod_transformer = PodMetricToGraphTransformer(snapshot.pod_metrics, sink) + pod_transformer.transform(context) diff --git a/app/core/metric_repository.py b/app/core/metric_repository.py index d78806b..fd08919 100644 --- a/app/core/metric_repository.py +++ b/app/core/metric_repository.py @@ -1,4 +1,4 @@ -from typing import List +from typing import List, Optional import asyncio from dataclasses import dataclass @@ -22,21 +22,13 @@ def get_by_name(self) -> QueryResultParser: @dataclass class MetricQuery: - measurement_name: str + measurement_id: str + subresource: Optional[str] + source: str + unit: str + property: str query: str result_parser: ResultParserId - source: str - - -@dataclass -class Metric: - identifier: str - kind: str - measurement_name: str - metric_name: str - value: float - timestamp: int - source: str class MetricRepository: diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 3081a46..42a4cee 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -34,7 +34,7 @@ def setUp(self) -> None: self.running_event.set() self.runner = asyncio.Runner() self.settings = KGBuilderSettings( - builder_tick_seconds=1, 
influxdb_queries=QuerySettings() + builder_tick_seconds=1, queries=QuerySettings() ) def test_build(self) -> None: diff --git a/app/core/test_metric_repository.py b/app/core/test_metric_repository.py index 2ded797..f5a4630 100644 --- a/app/core/test_metric_repository.py +++ b/app/core/test_metric_repository.py @@ -19,10 +19,13 @@ def test_query_one(self) -> None: self.client.mock_query("test_query", [expected]) now = 1 query = MetricQuery( - measurement_name="measurement", + measurement_id="measurement", + subresource=None, + source="source", query="test_query", + unit="bytes", + property="property", result_parser=ResultParserId.SIMPLE_RESULT_PARSER, - source="source", ) actual = asyncio.run(self.repository.query_one(now, query)) @@ -37,22 +40,31 @@ def test_query_many(self) -> None: self.client.mock_query("test_query3", [expected3]) now = 1 query1 = MetricQuery( - measurement_name="measurement", + measurement_id="measurement", + subresource=None, + source="source", + unit="bytes", + property="property", query="test_query1", result_parser=ResultParserId.SIMPLE_RESULT_PARSER, - source="source", ) query2 = MetricQuery( - measurement_name="measurement", + measurement_id="measurement", + subresource=None, + source="source", query="test_query2", + unit="bytes", + property="property", result_parser=ResultParserId.SIMPLE_RESULT_PARSER, - source="source", ) query3 = MetricQuery( - measurement_name="measurement", + measurement_id="measurement", + subresource=None, + source="source", query="test_query3", + unit="bytes", + property="property", result_parser=ResultParserId.SIMPLE_RESULT_PARSER, - source="source", ) actual = asyncio.run(self.repository.query_many(now, [query1, query2, query3])) diff --git a/app/core/types.py b/app/core/types.py index 993c0c2..3054707 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -1,8 +1,9 @@ -from typing import List +from typing import List, Tuple from dataclasses import dataclass, field from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.metric_repository import MetricQuery from app.core.metric_value import MetricValue from app.kg.graph import Graph @@ -23,11 +24,25 @@ class DKGSlice: timestamp: int +# TODO remove +@dataclass +class Metric: + identifier: str + kind: str + measurement_name: str + metric_name: str + value: float + timestamp: int + source: str + + @dataclass class MetricSnapshot: - pod_metrics: List[MetricValue] = field(default_factory=list) - node_metrics: List[MetricValue] = field(default_factory=list) - workload_metrics: List[MetricValue] = field(default_factory=list) + pod_metrics: List[Tuple[MetricQuery, MetricValue]] = field(default_factory=list) + node_metrics: List[Tuple[MetricQuery, MetricValue]] = field(default_factory=list) + workload_metrics: List[Tuple[MetricQuery, MetricValue]] = field( + default_factory=list + ) @dataclass diff --git a/app/metric_transform/__fixture__/node.jsonld b/app/metric_transform/__fixture__/node.jsonld new file mode 100644 index 0000000..722e380 --- /dev/null +++ b/app/metric_transform/__fixture__/node.jsonld @@ -0,0 +1,76 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "cluster:worker1", + "@type": "glc:WorkProducingResource", + "glc:hasID": "cluster:worker1", + "glc:hasMeasurement": { + "@id": "cluster:worker1.Energy.Usage", + "@type": "glc:Measurement", + 
"glc:hasDescription": "Energy.Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:worker1.Energy.Usage", + "glc:measuredIn": "glc:milliwatt", + "glc:relatesToMeasurementProperty": "glc:Energy.Usage" + } + }, + { + "@id": "cluster:worker1.CPU", + "@type": "glc:WorkProducingResource", + "glc:hasID": "cluster:worker1.CPU", + "glc:hasMeasurement": { + "@id": "cluster:worker1.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:worker1.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + } + }, + { + "@id": "glc:CPU.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Usage" + }, + { + "@id": "glc:Energy.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Energy.Usage" + }, + { + "@id": "glc:cAdvisor", + "@type": "glc:MeasuringResource", + "glc:hasDescription": { + "@set": [ + "cAdvisor" + ] + }, + "glc:hasID": "glc:cAdvisor", + "glc:makes": { + "@set": [ + "cluster:worker1.CPU.Usage", + "cluster:worker1.Energy.Usage" + ] + } + }, + { + "@id": "glc:coreseconds", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:coreseconds" + }, + { + "@id": "glc:milliwatt", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:milliwatt" + } + ] +} diff --git a/app/metric_transform/__fixture__/node.turtle b/app/metric_transform/__fixture__/node.turtle new file mode 100644 index 0000000..30729b5 --- /dev/null +++ b/app/metric_transform/__fixture__/node.turtle @@ -0,0 +1,32 @@ +cluster:worker1 rdf:type glc:WorkProducingResource . +cluster:worker1 glc:hasID cluster:worker1 . +cluster:worker1 glc:hasMeasurement cluster:worker1.Energy.Usage . +cluster:worker1.CPU rdf:type glc:WorkProducingResource . +cluster:worker1.CPU glc:hasID cluster:worker1.CPU . +cluster:worker1.CPU glc:hasMeasurement cluster:worker1.CPU.Usage . +cluster:worker1.CPU.Usage rdf:type glc:Measurement . +cluster:worker1.CPU.Usage glc:hasDescription "Usage" . +cluster:worker1.CPU.Usage glc:hasTimestamp 17100500^^ . +cluster:worker1.CPU.Usage glc:hasValue 42.0^^ . +cluster:worker1.CPU.Usage glc:hasID cluster:worker1.CPU.Usage . +cluster:worker1.CPU.Usage glc:measuredIn glc:coreseconds . +cluster:worker1.CPU.Usage glc:relatesToMeasurementProperty glc:CPU.Usage . +cluster:worker1.Energy.Usage rdf:type glc:Measurement . +cluster:worker1.Energy.Usage glc:hasDescription "Energy.Usage" . +cluster:worker1.Energy.Usage glc:hasTimestamp 17100500^^ . +cluster:worker1.Energy.Usage glc:hasValue 42.0^^ . +cluster:worker1.Energy.Usage glc:hasID cluster:worker1.Energy.Usage . +cluster:worker1.Energy.Usage glc:measuredIn glc:milliwatt . +cluster:worker1.Energy.Usage glc:relatesToMeasurementProperty glc:Energy.Usage . +glc:CPU.Usage rdf:type glc:MeasurementProperty . +glc:CPU.Usage glc:hasID glc:CPU.Usage . +glc:Energy.Usage rdf:type glc:MeasurementProperty . +glc:Energy.Usage glc:hasID glc:Energy.Usage . +glc:cAdvisor rdf:type glc:MeasuringResource . +glc:cAdvisor glc:hasDescription ("cAdvisor") . +glc:cAdvisor glc:hasID glc:cAdvisor . +glc:cAdvisor glc:makes (cluster:worker1.CPU.Usage cluster:worker1.Energy.Usage) . +glc:coreseconds rdf:type glc:MeasurementUnit . +glc:coreseconds glc:hasID glc:coreseconds . +glc:milliwatt rdf:type glc:MeasurementUnit . +glc:milliwatt glc:hasID glc:milliwatt . 
diff --git a/app/metric_transform/__fixture__/pod1.jsonld b/app/metric_transform/__fixture__/pod1.jsonld new file mode 100644 index 0000000..60db92a --- /dev/null +++ b/app/metric_transform/__fixture__/pod1.jsonld @@ -0,0 +1,75 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "cluster:pod1", + "@type": "glc:WorkProducingResource", + "glc:hasID": "cluster:pod1", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:pod1.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "CPU.Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:pod1.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + }, + { + "@id": "cluster:pod1.Energy.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Energy.Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:pod1.Energy.Usage", + "glc:measuredIn": "glc:milliwatt", + "glc:relatesToMeasurementProperty": "glc:Energy.Usage" + } + ] + } + }, + { + "@id": "glc:CPU.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Usage" + }, + { + "@id": "glc:Energy.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Energy.Usage" + }, + { + "@id": "glc:cAdvisor", + "@type": "glc:MeasuringResource", + "glc:hasDescription": { + "@set": [ + "cAdvisor" + ] + }, + "glc:hasID": "glc:cAdvisor", + "glc:makes": { + "@set": [ + "cluster:pod1.CPU.Usage", + "cluster:pod1.Energy.Usage" + ] + } + }, + { + "@id": "glc:coreseconds", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:coreseconds" + }, + { + "@id": "glc:milliwatt", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:milliwatt" + } + ] +} diff --git a/app/metric_transform/__fixture__/pod1.turtle b/app/metric_transform/__fixture__/pod1.turtle new file mode 100644 index 0000000..5e4c1e4 --- /dev/null +++ b/app/metric_transform/__fixture__/pod1.turtle @@ -0,0 +1,29 @@ +cluster:pod1 rdf:type glc:WorkProducingResource . +cluster:pod1 glc:hasID cluster:pod1 . +cluster:pod1 glc:hasMeasurement (cluster:pod1.CPU.Usage cluster:pod1.Energy.Usage) . +cluster:pod1.CPU.Usage rdf:type glc:Measurement . +cluster:pod1.CPU.Usage glc:hasDescription "CPU.Usage" . +cluster:pod1.CPU.Usage glc:hasTimestamp 17100500^^ . +cluster:pod1.CPU.Usage glc:hasValue 42.0^^ . +cluster:pod1.CPU.Usage glc:hasID cluster:pod1.CPU.Usage . +cluster:pod1.CPU.Usage glc:measuredIn glc:coreseconds . +cluster:pod1.CPU.Usage glc:relatesToMeasurementProperty glc:CPU.Usage . +cluster:pod1.Energy.Usage rdf:type glc:Measurement . +cluster:pod1.Energy.Usage glc:hasDescription "Energy.Usage" . +cluster:pod1.Energy.Usage glc:hasTimestamp 17100500^^ . +cluster:pod1.Energy.Usage glc:hasValue 42.0^^ . +cluster:pod1.Energy.Usage glc:hasID cluster:pod1.Energy.Usage . +cluster:pod1.Energy.Usage glc:measuredIn glc:milliwatt . +cluster:pod1.Energy.Usage glc:relatesToMeasurementProperty glc:Energy.Usage . +glc:CPU.Usage rdf:type glc:MeasurementProperty . +glc:CPU.Usage glc:hasID glc:CPU.Usage . +glc:Energy.Usage rdf:type glc:MeasurementProperty . +glc:Energy.Usage glc:hasID glc:Energy.Usage . +glc:cAdvisor rdf:type glc:MeasuringResource . +glc:cAdvisor glc:hasDescription ("cAdvisor") . +glc:cAdvisor glc:hasID glc:cAdvisor . 
+glc:cAdvisor glc:makes (cluster:pod1.CPU.Usage cluster:pod1.Energy.Usage) .
+glc:coreseconds rdf:type glc:MeasurementUnit .
+glc:coreseconds glc:hasID glc:coreseconds .
+glc:milliwatt rdf:type glc:MeasurementUnit .
+glc:milliwatt glc:hasID glc:milliwatt .
diff --git a/app/metric_transform/metric_transformer.py b/app/metric_transform/metric_transformer.py
index e69de29..fa44168 100644
--- a/app/metric_transform/metric_transformer.py
+++ b/app/metric_transform/metric_transformer.py
@@ -0,0 +1,18 @@
+from typing import List, Tuple
+
+from app.core.metric_repository import MetricQuery
+from app.core.metric_value import MetricValue
+from app.k8s_transform.transformation_context import TransformationContext
+from app.kg.graph import Graph
+
+
+class MetricToGraphTransformerBase:
+    metrics: List[Tuple[MetricQuery, MetricValue]]
+    sink: Graph
+
+    def __init__(self, metrics: List[Tuple[MetricQuery, MetricValue]], sink: Graph):
+        self.metrics = metrics
+        self.sink = sink
+
+    def transform(self, context: TransformationContext) -> None:
+        raise NotImplementedError
diff --git a/app/metric_transform/node_metric_transformer.py b/app/metric_transform/node_metric_transformer.py
new file mode 100644
index 0000000..f5f4a3c
--- /dev/null
+++ b/app/metric_transform/node_metric_transformer.py
@@ -0,0 +1,47 @@
+from typing import List, Tuple
+
+from app.core.metric_repository import MetricQuery
+from app.core.metric_value import MetricValue
+from app.k8s_transform.transformation_context import TransformationContext
+from 
app.k8s_transform.transformer_base import TransformerBase +from app.k8s_transform.upper_ontology_base import UpperOntologyBase +from app.kg.graph import Graph +from app.kg.iri import IRI +from app.metric_transform.metric_transformer import MetricToGraphTransformerBase + + +class PodMetricToGraphTransformer(MetricToGraphTransformerBase, UpperOntologyBase): + def __init__(self, metrics: List[Tuple[MetricQuery, MetricValue]], sink: Graph): + MetricToGraphTransformerBase.__init__(self, metrics, sink) + UpperOntologyBase.__init__(self, sink) + + def transform(self, context: TransformationContext) -> None: + for query, result in self.metrics: + pod_id = self.get_pod_id(result.resource_id) + measurement_id = pod_id.dot(query.measurement_id) + property_id = IRI(self.GLACIATION_PREFIX, query.property) + unit_id = IRI(self.GLACIATION_PREFIX, query.unit) + source_id = IRI(self.GLACIATION_PREFIX, query.source) + self.add_measurement_property(property_id, None) + self.add_unit(unit_id, None) + self.add_measuring_resource(source_id, query.source) + self.add_work_producing_resource(pod_id, None) + self.add_measurement( + measurement_id, + query.measurement_id, + result.value, + result.timestamp, + unit_id, + property_id, + source_id, + ) + self.sink.add_relation(pod_id, self.HAS_MEASUREMENT, measurement_id) + + def get_pod_id(self, name: str) -> IRI: + resource_id = IRI(TransformerBase.CLUSTER_PREFIX, name) + return resource_id diff --git a/app/metric_transform/test_base.py b/app/metric_transform/test_base.py new file mode 100644 index 0000000..8f664df --- /dev/null +++ b/app/metric_transform/test_base.py @@ -0,0 +1,48 @@ +from typing import Any, Dict, List + +import json +from io import FileIO +from unittest import TestCase + +from app.k8s_transform.upper_ontology_base import UpperOntologyBase +from app.kg.id_base import IdBase +from app.kg.iri import IRI +from app.serialize.jsonld_configuration import JsonLDConfiguration + + +class MetricTransformTestBase(TestCase): + def load_turtle(self, name: str) -> str: + with FileIO(f"app/metric_transform/__fixture__/{name}.turtle") as f: + return f.readall().decode("utf-8") + + def load_jsonld(self, name: str) -> str: + with FileIO(f"app/metric_transform/__fixture__/{name}.jsonld") as f: + return json.load(f) # type: ignore + + def load_json(self, name: str) -> Dict[str, Any]: + with FileIO(f"app/metric_transform/__fixture__/{name}.json") as f: + return json.load(f) # type: ignore + + def load_json_list(self, name: str) -> List[Dict[str, Any]]: + with FileIO(f"app/metric_transform/__fixture__/{name}.json") as f: + return json.load(f) # type: ignore + + def get_jsonld_config(self) -> JsonLDConfiguration: + contexts: Dict[IdBase, Dict[str, Any]] = { + JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + } + } + return JsonLDConfiguration( + contexts, + { + IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasuringResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), + }, + ) diff --git a/app/metric_transform/test_node_metric_transformer.py b/app/metric_transform/test_node_metric_transformer.py new file mode 100644 index 0000000..8219f09 --- /dev/null 
+++ b/app/metric_transform/test_node_metric_transformer.py @@ -0,0 +1,74 @@ +import json +from io import StringIO + +from app.core.metric_repository import MetricQuery, ResultParserId +from app.core.metric_value import MetricValue +from app.k8s_transform.transformation_context import TransformationContext +from app.kg.inmemory_graph import InMemoryGraph +from app.metric_transform.node_metric_transformer import NodeMetricToGraphTransformer +from app.metric_transform.test_base import MetricTransformTestBase +from app.serialize.jsonld_serializer import JsonLDSerialializer +from app.serialize.turtle_serializer import TurtleSerialializer + + +class NodeMetricToGraphTransformerTest(MetricTransformTestBase): + test_metrics = [ + ( + MetricQuery( + measurement_id="Usage", + subresource="CPU", + source="cAdvisor", + unit="coreseconds", + property="CPU.Usage", + query="query", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + ), + MetricValue( + metric_id="my_metric", + resource_id="worker1", + timestamp=17100500, + value=42.0, + ), + ), + ( + MetricQuery( + measurement_id="Energy.Usage", + subresource=None, + source="cAdvisor", + unit="milliwatt", + property="Energy.Usage", + query="query", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + ), + MetricValue( + metric_id="my_metric", + resource_id="worker1", + timestamp=17100500, + value=42.0, + ), + ), + ] + + def setUp(self): + self.maxDiff = None + + def test_transform_turtle(self) -> None: + node_turtle = self.load_turtle("node") + + buffer = StringIO() + graph = InMemoryGraph() + context = TransformationContext(123) + NodeMetricToGraphTransformer(self.test_metrics, graph).transform(context) + TurtleSerialializer().write(buffer, graph) + self.assertEqual(buffer.getvalue(), node_turtle) + + def test_transform_jsonld(self) -> None: + node_jsonld = self.load_jsonld("node") + + buffer = StringIO() + graph = InMemoryGraph() + context = TransformationContext(123) + transformer = NodeMetricToGraphTransformer(self.test_metrics, graph) + transformer.transform(context) + JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) + self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) diff --git a/app/metric_transform/test_pod_metric_transformer.py b/app/metric_transform/test_pod_metric_transformer.py new file mode 100644 index 0000000..0a5ee47 --- /dev/null +++ b/app/metric_transform/test_pod_metric_transformer.py @@ -0,0 +1,74 @@ +import json +from io import StringIO + +from app.core.metric_repository import MetricQuery, ResultParserId +from app.core.metric_value import MetricValue +from app.k8s_transform.transformation_context import TransformationContext +from app.kg.inmemory_graph import InMemoryGraph +from app.metric_transform.pod_metric_transformer import PodMetricToGraphTransformer +from app.metric_transform.test_base import MetricTransformTestBase +from app.serialize.jsonld_serializer import JsonLDSerialializer +from app.serialize.turtle_serializer import TurtleSerialializer + + +class PodMetricToGraphTransformerTest(MetricTransformTestBase): + test_metrics = [ + ( + MetricQuery( + measurement_id="CPU.Usage", + subresource=None, + source="cAdvisor", + unit="coreseconds", + property="CPU.Usage", + query="query", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + ), + MetricValue( + metric_id="my_metric", + resource_id="pod1", + timestamp=17100500, + value=42.0, + ), + ), + ( + MetricQuery( + measurement_id="Energy.Usage", + subresource=None, + source="cAdvisor", + unit="milliwatt", + property="Energy.Usage", + 
query="query", + result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + ), + MetricValue( + metric_id="my_metric", + resource_id="pod1", + timestamp=17100500, + value=42.0, + ), + ), + ] + + def setUp(self): + self.maxDiff = None + + def test_transform_turtle(self) -> None: + node_turtle = self.load_turtle("pod1") + + buffer = StringIO() + graph = InMemoryGraph() + context = TransformationContext(123) + PodMetricToGraphTransformer(self.test_metrics, graph).transform(context) + TurtleSerialializer().write(buffer, graph) + self.assertEqual(buffer.getvalue(), node_turtle) + + def test_transform_jsonld(self) -> None: + node_jsonld = self.load_jsonld("pod1") + + buffer = StringIO() + graph = InMemoryGraph() + context = TransformationContext(123) + transformer = PodMetricToGraphTransformer(self.test_metrics, graph) + transformer.transform(context) + JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) + self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) From 6abd0adb363370022c71789b66471e68149d2f27 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 11 Jun 2024 15:45:33 +0200 Subject: [PATCH 23/61] HHT-669: Slicing Strategy tests --- app/clients/k8s/k8s_client.py | 88 +++++- app/clients/k8s/k8s_client_impl.py | 3 + app/clients/k8s/mock_k8s_client.py | 8 + .../snapshot/minimal/k8s_cluster.yaml | 28 ++ .../snapshot/minimal/k8s_deployments.yaml | 54 ++++ .../snapshot/minimal/k8s_nodes.yaml | 31 ++ .../snapshot/minimal/k8s_pods.yaml | 49 +++ .../snapshot/minimal/k8s_replicasets.yaml | 24 ++ .../snapshot/minimal/metric_nodes.yaml | 52 ++++ .../snapshot/minimal/metric_pods.yaml | 52 ++++ .../snapshot/multinode/k8s_cluster.yaml | 28 ++ .../snapshot/multinode/k8s_daemonsets.yaml | 38 +++ .../snapshot/multinode/k8s_deployments.yaml | 53 ++++ .../snapshot/multinode/k8s_jobs.yaml | 19 ++ .../snapshot/multinode/k8s_nodes.yaml | 61 ++++ .../snapshot/multinode/k8s_pods.yaml | 292 ++++++++++++++++++ .../snapshot/multinode/k8s_replicasets.yaml | 25 ++ .../snapshot/multinode/k8s_statefullsets.yaml | 30 ++ .../snapshot/multinode/metric_nodes.yaml | 117 +++++++ .../snapshot/multinode/metric_pods.yaml | 212 +++++++++++++ app/core/kg_builder.py | 3 +- app/core/slice_for_node_strategy.py | 106 ++++++- app/core/test_kg_builder.py | 49 ++- app/core/test_kgslice_assembler.py | 31 +- app/core/test_slice_for_node_strategy.py | 118 ++++++- app/core/test_snapshot_base.py | 73 +++++ app/core/types.py | 27 +- app/kgexporter_context_builder.py | 4 +- app/test_kgexporter_context.py | 4 +- app/test_pydantic_yaml.py | 4 +- 30 files changed, 1648 insertions(+), 35 deletions(-) create mode 100644 app/core/__fixture__/snapshot/minimal/k8s_cluster.yaml create mode 100644 app/core/__fixture__/snapshot/minimal/k8s_deployments.yaml create mode 100644 app/core/__fixture__/snapshot/minimal/k8s_nodes.yaml create mode 100644 app/core/__fixture__/snapshot/minimal/k8s_pods.yaml create mode 100644 app/core/__fixture__/snapshot/minimal/k8s_replicasets.yaml create mode 100644 app/core/__fixture__/snapshot/minimal/metric_nodes.yaml create mode 100644 app/core/__fixture__/snapshot/minimal/metric_pods.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_cluster.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_daemonsets.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_deployments.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_jobs.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_nodes.yaml create mode 100644 
app/core/__fixture__/snapshot/multinode/k8s_pods.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_replicasets.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_statefullsets.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/metric_nodes.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/metric_pods.yaml create mode 100644 app/core/test_snapshot_base.py diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py index 5275103..48538bd 100644 --- a/app/clients/k8s/k8s_client.py +++ b/app/clients/k8s/k8s_client.py @@ -1,8 +1,10 @@ -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional, Set import asyncio from dataclasses import dataclass, field +from jsonpath_ng.ext import parse + @dataclass class ResourceSnapshot: @@ -15,10 +17,84 @@ class ResourceSnapshot: daemonsets: List[Dict[str, Any]] = field(default_factory=list) replicasets: List[Dict[str, Any]] = field(default_factory=list) + def get_resource_names(self) -> Set[str]: + names: Set[str] = set() + names = {*names, *{self.get_resource_name(resource) for resource in self.pods}} + names = {*names, *{self.get_resource_name(resource) for resource in self.nodes}} + names = { + *names, + *{self.get_resource_name(resource) for resource in self.deployments}, + } + names = {*names, *{self.get_resource_name(resource) for resource in self.jobs}} + names = { + *names, + *{self.get_resource_name(resource) for resource in self.statefullsets}, + } + names = { + *names, + *{self.get_resource_name(resource) for resource in self.daemonsets}, + } + names = { + *names, + *{self.get_resource_name(resource) for resource in self.replicasets}, + } + return names + + def find_resources_by_kind_and_identity( + self, kind: str, identity: str + ) -> List[Dict[str, Any]]: + resources = self.get_resources_by_kind(kind) + if resources: + return [ + resource + for resource in resources + if self.get_resource_name(resource) == identity + ] + else: + return [] + + def get_resource_name(self, node: Dict[str, Any]) -> str: + for match in parse("$.metadata.name").find(node): + return str(match.value) + raise Exception("Metadata does not contain name.") + + def get_resources_by_kind(self, kind: str) -> Optional[List[Dict[str, Any]]]: + if kind == "Pod": + return self.pods + elif kind == "Node": + return self.nodes + elif kind == "Deployment": + return self.deployments + elif kind == "Job": + return self.jobs + elif kind == "StatefulSet": + return self.statefullsets + elif kind == "DaemonSet": + return self.daemonsets + elif kind == "ReplicaSet": + return self.replicasets + return None + + def add_resources_by_kind(self, kind: str, resources: List[Dict[str, Any]]) -> None: + if kind == "Pod": + self.pods.extend(resources) + elif kind == "Node": + self.nodes.extend(resources) + elif kind == "Deployment": + self.deployments.extend(resources) + elif kind == "Job": + self.jobs.extend(resources) + elif kind == "StatefulSet": + self.statefullsets.extend(resources) + elif kind == "DaemonSet": + self.daemonsets.extend(resources) + elif kind == "ReplicaSet": + self.replicasets.extend(resources) + class K8SClient: async def fetch_snapshot(self) -> ResourceSnapshot: - result = await asyncio.gather( + resources = await asyncio.gather( self.get_nodes(), self.get_pods(), self.get_deployments(), @@ -35,9 +111,10 @@ async def fetch_snapshot(self) -> ResourceSnapshot: statefullsets, daemonsets, replicasets, - ) = result + ) = resources + cluster_info = await self.get_cluster_info() return 
ResourceSnapshot( - cluster=dict(), + cluster=cluster_info, pods=pods, nodes=nodes, deployments=deployments, @@ -67,3 +144,6 @@ async def get_statefullsets(self) -> List[Dict[str, Any]]: async def get_jobs(self) -> List[Dict[str, Any]]: raise NotImplementedError + + async def get_cluster_info(self) -> Dict[str, Any]: + raise NotImplementedError diff --git a/app/clients/k8s/k8s_client_impl.py b/app/clients/k8s/k8s_client_impl.py index 51b216f..36e8414 100644 --- a/app/clients/k8s/k8s_client_impl.py +++ b/app/clients/k8s/k8s_client_impl.py @@ -41,6 +41,9 @@ async def get_statefullsets(self) -> List[Dict[str, Any]]: async def get_jobs(self) -> List[Dict[str, Any]]: return await self.get_resource("Job") + async def get_cluster_info(self) -> Dict[str, Any]: + raise NotImplementedError("get_cluster_info() is not implemented") + async def get_resource(self, kind: str) -> List[Dict[str, Any]]: api = self.client.resources.get(api_version="v1", kind=kind) return [item.to_dict() for item in api.get().items] diff --git a/app/clients/k8s/mock_k8s_client.py b/app/clients/k8s/mock_k8s_client.py index 44c4d80..fd38a71 100644 --- a/app/clients/k8s/mock_k8s_client.py +++ b/app/clients/k8s/mock_k8s_client.py @@ -11,6 +11,7 @@ class MockK8SClient(K8SClient): daemonsets: List[Dict[str, Any]] statefullsets: List[Dict[str, Any]] jobs: List[Dict[str, Any]] + cluster: Dict[str, Any] def __init__(self): self.nodes = [] @@ -20,6 +21,13 @@ def __init__(self): self.daemonsets = [] self.statefullsets = [] self.jobs = [] + self.cluster = {} + + def mock_cluster(self, cluster: Dict[str, Any]) -> None: + self.cluster = cluster + + async def get_cluster_info(self) -> Dict[str, Any]: + return self.cluster def mock_nodes(self, nodes: List[Dict[str, Any]]) -> None: self.nodes = nodes diff --git a/app/core/__fixture__/snapshot/minimal/k8s_cluster.yaml b/app/core/__fixture__/snapshot/minimal/k8s_cluster.yaml new file mode 100644 index 0000000..b54688a --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/k8s_cluster.yaml @@ -0,0 +1,28 @@ +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: kubeadm-config + namespace: kube-system +data: + ClusterConfiguration: | + apiServer: + extraArgs: + authorization-mode: Node,RBAC + timeoutForControlPlane: 4m0s + apiVersion: kubeadm.k8s.io/v1beta3 + certificatesDir: /etc/kubernetes/pki + clusterName: kubernetes + controllerManager: {} + dns: {} + etcd: + local: + dataDir: /var/lib/etcd + imageRepository: registry.k8s.io + kind: ClusterConfiguration + kubernetesVersion: v1.26.14 + networking: + dnsDomain: cluster.local + podSubnet: 10.244.0.0/16 + serviceSubnet: 10.96.0.0/12 + scheduler: {} diff --git a/app/core/__fixture__/snapshot/minimal/k8s_deployments.yaml b/app/core/__fixture__/snapshot/minimal/k8s_deployments.yaml new file mode 100644 index 0000000..36eca58 --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/k8s_deployments.yaml @@ -0,0 +1,54 @@ +- apiVersion: apps/v1 + kind: Deployment + metadata: + name: coredns + uid: 1 + namespace: kube-system + ownerReferences: + - apiVersion: minio.min.io/v2 + kind: CRD + name: crd-resource + labels: + k8s-app: kube-dns + spec: + replicas: 2 + selector: + matchLabels: + k8s-app: kube-dns + template: + metadata: + labels: + k8s-app: kube-dns + annotations: + glaciation-project.eu/resource/limits/energy: '100' + glaciation-project.eu/resource/limits/network: '1010' + glaciation-project.eu/resource/limits/gpu: '101' + glaciation-project.eu/resource/requests/energy: '100' + glaciation-project.eu/resource/requests/network: '1010' + 
glaciation-project.eu/resource/requests/gpu: '101' + spec: + initContainers: + - name: init-coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + containers: + - name: coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + - name: coredns-other + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + ephemeral-storage: 100500 + schedulerName: default-scheduler diff --git a/app/core/__fixture__/snapshot/minimal/k8s_nodes.yaml b/app/core/__fixture__/snapshot/minimal/k8s_nodes.yaml new file mode 100644 index 0000000..1b4f86c --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/k8s_nodes.yaml @@ -0,0 +1,31 @@ +- apiVersion: v1 + kind: Node + metadata: + name: glaciation-test-master01 + uid: 1 + annotations: + glaciation-project.eu/metric/node-energy-index: '1001' + spec: + status: + allocatable: + cpu: '4' + ephemeral-storage: '47266578354' + hugepages-1Gi: '0' + hugepages-2Mi: '0' + memory: 16283144Ki + pods: '110' + capacity: + cpu: '4' + ephemeral-storage: 51287520Ki + hugepages-1Gi: '0' + hugepages-2Mi: '0' + memory: 16385544Ki + pods: '110' + conditions: + - lastHeartbeatTime: '2024-02-27T10:37:54Z' + lastTransitionTime: '2024-02-13T13:53:43Z' + message: kubelet is posting ready status. AppArmor enabled + reason: KubeletReady + status: 'True' + type: Ready + nodeInfo: diff --git a/app/core/__fixture__/snapshot/minimal/k8s_pods.yaml b/app/core/__fixture__/snapshot/minimal/k8s_pods.yaml new file mode 100644 index 0000000..ed27fa9 --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/k8s_pods.yaml @@ -0,0 +1,49 @@ +- kind: Pod + metadata: + name: coredns-787d4945fb-l85r5 + namespace: kube-system + uid: 1 + ownerReferences: + - apiVersion: apps/v1 + kind: ReplicaSet + name: coredns-787d4945fb + spec: + containers: + - name: coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + nodeName: glaciation-test-master01 + schedulerName: default-scheduler + status: + conditions: + - lastProbeTime: + lastTransitionTime: '2024-02-13T13:53:43Z' + status: 'True' + type: Initialized + - lastProbeTime: + lastTransitionTime: '2024-02-13T15:09:49Z' + status: 'True' + type: Ready + - lastProbeTime: + lastTransitionTime: '2024-02-13T15:09:49Z' + status: 'True' + type: ContainersReady + - lastProbeTime: + lastTransitionTime: '2024-02-13T13:53:43Z' + status: 'True' + type: PodScheduled + containerStatuses: + - containerID: containerd://0ce09d1e8fdff70a58902bb3e73efafa76035dddbe9cec8b4115ac80d09f9963 + lastState: {} + name: coredns + ready: true + started: true + state: + running: + startedAt: '2024-02-13T13:53:44Z' + phase: Running + startTime: '2024-02-13T13:53:43Z' diff --git a/app/core/__fixture__/snapshot/minimal/k8s_replicasets.yaml b/app/core/__fixture__/snapshot/minimal/k8s_replicasets.yaml new file mode 100644 index 0000000..a151016 --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/k8s_replicasets.yaml @@ -0,0 +1,24 @@ +- apiVersion: apps/v1 + kind: ReplicaSet + metadata: + name: coredns-787d4945fb + namespace: kube-system + uid: 1 + ownerReferences: + - apiVersion: apps/v1 + kind: Deployment + name: coredns + spec: + template: + metadata: + labels: + spec: + containers: + - name: coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + status: diff --git a/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml b/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml new file mode 100644 index 0000000..1a2c876 --- /dev/null +++ 
b/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml @@ -0,0 +1,52 @@ +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: test1 + result_parser: SimpleResultParser + value: + metric_id: cpu_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 41.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: test2 + result_parser: SimpleResultParser + value: + metric_id: ram_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 42.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytesß + property: Storage.Usage + query: test3 + result_parser: SimpleResultParser + value: + metric_id: eph_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 43.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: test4 + result_parser: SimpleResultParser + value: + metric_id: net_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 43.0 diff --git a/app/core/__fixture__/snapshot/minimal/metric_pods.yaml b/app/core/__fixture__/snapshot/minimal/metric_pods.yaml new file mode 100644 index 0000000..9b82833 --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/metric_pods.yaml @@ -0,0 +1,52 @@ +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: test5 + result_parser: SimpleResultParser + value: + metric_id: pod_cpu_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 41.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: test6 + result_parser: SimpleResultParser + value: + metric_id: pod_ram_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 42.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytes + property: Storage.Usage + query: test7 + result_parser: SimpleResultParser + value: + metric_id: pod_eph_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 43.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: test8 + result_parser: SimpleResultParser + value: + metric_id: pod_net_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 43.0 diff --git a/app/core/__fixture__/snapshot/multinode/k8s_cluster.yaml b/app/core/__fixture__/snapshot/multinode/k8s_cluster.yaml new file mode 100644 index 0000000..b54688a --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_cluster.yaml @@ -0,0 +1,28 @@ +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: kubeadm-config + namespace: kube-system +data: + ClusterConfiguration: | + apiServer: + extraArgs: + authorization-mode: Node,RBAC + timeoutForControlPlane: 4m0s + apiVersion: kubeadm.k8s.io/v1beta3 + certificatesDir: /etc/kubernetes/pki + clusterName: kubernetes + controllerManager: {} + dns: {} + etcd: + local: + dataDir: /var/lib/etcd + imageRepository: registry.k8s.io + kind: ClusterConfiguration + kubernetesVersion: v1.26.14 + networking: + dnsDomain: cluster.local + podSubnet: 10.244.0.0/16 + serviceSubnet: 10.96.0.0/12 + scheduler: {} diff --git a/app/core/__fixture__/snapshot/multinode/k8s_daemonsets.yaml b/app/core/__fixture__/snapshot/multinode/k8s_daemonsets.yaml new file mode 
100644 index 0000000..30d10c8 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_daemonsets.yaml @@ -0,0 +1,38 @@ +- apiVersion: apps/v1 + kind: DaemonSet + metadata: + name: kube-flannel-ds + namespace: monitoring + spec: + template: + metadata: + labels: + app.kubernetes.io/component: exporter + app.kubernetes.io/name: kepler + spec: + containers: + - name: kube-flannel + resources: + limits: + memory: 500Mi + cpu: 200m + requests: + cpu: 50m + memory: 100Mi + - name: install-cni-plugin + resources: + limits: + memory: 500Mi + cpu: 200m + requests: + cpu: 50m + memory: 100Mi + - name: install-cni + resources: + limits: + memory: 500Mi + cpu: 200m + requests: + cpu: 50m + memory: 100Mi + status: diff --git a/app/core/__fixture__/snapshot/multinode/k8s_deployments.yaml b/app/core/__fixture__/snapshot/multinode/k8s_deployments.yaml new file mode 100644 index 0000000..df93fdf --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_deployments.yaml @@ -0,0 +1,53 @@ +- apiVersion: apps/v1 + kind: Deployment + metadata: + name: coredns + namespace: kube-system + ownerReferences: + - apiVersion: minio.min.io/v2 + kind: CRD + name: crd-resource + labels: + k8s-app: kube-dns + spec: + replicas: 2 + selector: + matchLabels: + k8s-app: kube-dns + template: + metadata: + labels: + k8s-app: kube-dns + annotations: + glaciation-project.eu/resource/limits/energy: '100' + glaciation-project.eu/resource/limits/network: '1010' + glaciation-project.eu/resource/limits/gpu: '101' + glaciation-project.eu/resource/requests/energy: '100' + glaciation-project.eu/resource/requests/network: '1010' + glaciation-project.eu/resource/requests/gpu: '101' + spec: + initContainers: + - name: init-coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + containers: + - name: coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + - name: coredns-other + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + ephemeral-storage: 100500 + schedulerName: default-scheduler diff --git a/app/core/__fixture__/snapshot/multinode/k8s_jobs.yaml b/app/core/__fixture__/snapshot/multinode/k8s_jobs.yaml new file mode 100644 index 0000000..6887d90 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_jobs.yaml @@ -0,0 +1,19 @@ +- apiVersion: batch/v1 + kind: Job + metadata: + name: init-vault-cluster + namespace: vault + spec: + template: + metadata: + labels: + job-name: init-vault-cluster + spec: + containers: + - name: vault + resources: + limits: + memory: 200Mi + requests: + cpu: 100m + memory: 100Mi diff --git a/app/core/__fixture__/snapshot/multinode/k8s_nodes.yaml b/app/core/__fixture__/snapshot/multinode/k8s_nodes.yaml new file mode 100644 index 0000000..15d98e6 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_nodes.yaml @@ -0,0 +1,61 @@ +- apiVersion: v1 + kind: Node + metadata: + name: glaciation-test-master01 + annotations: + glaciation-project.eu/metric/node-energy-index: '1001' + spec: + status: + allocatable: + cpu: '4' + ephemeral-storage: '47266578354' + hugepages-1Gi: '0' + hugepages-2Mi: '0' + memory: 16283144Ki + pods: '110' + capacity: + cpu: '4' + ephemeral-storage: 51287520Ki + hugepages-1Gi: '0' + hugepages-2Mi: '0' + memory: 16385544Ki + pods: '110' + conditions: + - lastHeartbeatTime: '2024-02-27T10:37:54Z' + lastTransitionTime: '2024-02-13T13:53:43Z' + message: kubelet is posting ready status. 
AppArmor enabled + reason: KubeletReady + status: 'True' + type: Ready + nodeInfo: +- apiVersion: v1 + kind: Node + metadata: + name: glaciation-test-worker01 + annotations: + glaciation-project.eu/metric/node-energy-index: '1001' + spec: + status: + allocatable: + cpu: '4' + ephemeral-storage: '47266578354' + hugepages-1Gi: '0' + hugepages-2Mi: '0' + memory: 16283152Ki + pods: '110' + capacity: + cpu: '4' + ephemeral-storage: 51287520Ki + hugepages-1Gi: '0' + hugepages-2Mi: '0' + memory: 16385552Ki + pods: '110' + conditions: + - lastHeartbeatTime: '2024-02-27T10:36:23Z' + lastTransitionTime: '2024-02-23T06:11:02Z' + message: kubelet is posting ready status. AppArmor enabled + reason: KubeletReady + status: 'True' + type: Ready + images: + nodeInfo: diff --git a/app/core/__fixture__/snapshot/multinode/k8s_pods.yaml b/app/core/__fixture__/snapshot/multinode/k8s_pods.yaml new file mode 100644 index 0000000..59dfe04 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_pods.yaml @@ -0,0 +1,292 @@ +- kind: Pod + metadata: + name: coredns-787d4945fb-l85r5 + namespace: kube-system + ownerReferences: + - apiVersion: apps/v1 + kind: ReplicaSet + name: coredns-787d4945fb + spec: + containers: + - name: coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + nodeName: glaciation-test-master01 + schedulerName: default-scheduler + status: + conditions: + - lastProbeTime: + lastTransitionTime: '2024-02-13T13:53:43Z' + status: 'True' + type: Initialized + - lastProbeTime: + lastTransitionTime: '2024-02-13T15:09:49Z' + status: 'True' + type: Ready + - lastProbeTime: + lastTransitionTime: '2024-02-13T15:09:49Z' + status: 'True' + type: ContainersReady + - lastProbeTime: + lastTransitionTime: '2024-02-13T13:53:43Z' + status: 'True' + type: PodScheduled + containerStatuses: + - containerID: containerd://0ce09d1e8fdff70a58902bb3e73efafa76035dddbe9cec8b4115ac80d09f9963 + lastState: {} + name: coredns + ready: true + started: true + state: + running: + startedAt: '2024-02-13T13:53:44Z' + hostIP: 10.14.1.150 + phase: Running + startTime: '2024-02-13T13:53:43Z' +- kind: Pod + metadata: + name: tenant1-pool-0-1 + namespace: tenant1 + ownerReferences: + - apiVersion: apps/v1 + kind: StatefulSet + name: tenant1-pool-0 + spec: + containers: + - name: minio + resources: + requests: + cpu: '3' + memory: 7Gi + - name: sidecar + resources: {} + initContainers: + - name: validate-arguments + resources: {} + schedulerName: default-scheduler + status: + conditions: + - lastProbeTime: + lastTransitionTime: '2024-02-29T10:27:58Z' + message: '0/5 nodes are available: 5 node(s) didn''t find available persistent + volumes to bind. preemption: 0/5 nodes are available: 5 Preemption is not helpful + for scheduling..' 
+ reason: Unschedulable + status: 'False' + type: PodScheduled + phase: Pending +- kind: Pod + metadata: + name: kube-flannel-ds-848v8 + namespace: kube-flannel + ownerReferences: + - apiVersion: apps/v1 + kind: DaemonSet + name: kube-flannel-ds + spec: + containers: + - name: kube-flannel + resources: + requests: + cpu: 100m + memory: 50Mi + initContainers: + - name: install-cni-plugin + resources: {} + - name: install-cni + resources: {} + nodeName: glaciation-test-worker01 + schedulerName: default-scheduler + status: + conditions: + - lastProbeTime: + lastTransitionTime: '2023-10-20T11:02:00Z' + status: 'True' + type: Initialized + - lastProbeTime: + lastTransitionTime: '2023-12-11T11:10:17Z' + status: 'True' + type: Ready + - lastProbeTime: + lastTransitionTime: '2023-12-11T11:10:17Z' + status: 'True' + type: ContainersReady + - lastProbeTime: + lastTransitionTime: '2023-10-20T11:01:49Z' + status: 'True' + type: PodScheduled + containerStatuses: + - containerID: containerd://b23fcc9919d7e58fdeb4a729ccf2363c31429e9906a836f55e7d3bf61c908cdb + lastState: + terminated: + containerID: containerd://b516bf9221202a26438901dde36ae814743f9acad63006be2a37ab69f69b4747 + exitCode: 255 + finishedAt: '2023-12-11T11:07:10Z' + reason: Unknown + startedAt: '2023-10-24T11:40:54Z' + name: kube-flannel + ready: true + started: true + state: + running: + startedAt: '2023-12-11T11:10:16Z' + initContainerStatuses: + - containerID: containerd://c45d8a7c2b94d7bfd07f9343366b27b7bdced1cb6b07c0f36d4e813bc7f48a74 + lastState: {} + name: install-cni-plugin + ready: true + started: false + state: + terminated: + containerID: containerd://c45d8a7c2b94d7bfd07f9343366b27b7bdced1cb6b07c0f36d4e813bc7f48a74 + exitCode: 0 + finishedAt: '2023-12-11T11:10:14Z' + reason: Completed + startedAt: '2023-12-11T11:10:14Z' + - containerID: containerd://b3e5761de29cd8e7a2612a59c48e49882907acca959c9dc54721a43d86c4c69f + lastState: {} + name: install-cni + ready: true + started: false + state: + terminated: + containerID: containerd://b3e5761de29cd8e7a2612a59c48e49882907acca959c9dc54721a43d86c4c69f + exitCode: 0 + finishedAt: '2023-12-11T11:10:15Z' + reason: Completed + startedAt: '2023-12-11T11:10:15Z' + phase: Running + startTime: '2023-10-20T11:01:50Z' +- kind: Pod + metadata: + name: init-vault-cluster-cbqhq + namespace: vault + ownerReferences: + - apiVersion: batch/v1 + kind: Job + name: init-vault-cluster + spec: + containers: + - name: vault + resources: {} + nodeName: glaciation-test-worker01 + schedulerName: default-scheduler + status: + conditions: + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:37:57Z" + reason: PodCompleted + status: "True" + type: Initialized + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:39:02Z" + reason: PodCompleted + status: "False" + type: Ready + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:39:02Z" + reason: PodCompleted + status: "False" + type: ContainersReady + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:37:57Z" + status: "True" + type: PodScheduled + containerStatuses: + - containerID: containerd://8d6b55a900568fce8346a5fffb66e7d5dca809f676a15fb6d0f54feb8399ea39 + lastState: {} + name: vault + ready: false + started: false + state: + terminated: + containerID: containerd://8d6b55a900568fce8346a5fffb66e7d5dca809f676a15fb6d0f54feb8399ea39 + exitCode: 0 + finishedAt: "2024-05-17T11:39:02Z" + reason: Completed + startedAt: "2024-05-17T11:38:09Z" + phase: Succeeded + startTime: "2024-05-17T11:37:57Z" +- kind: Pod + metadata: + name: 
glaciation-pool-0-0 + namespace: minio-tenant + ownerReferences: + - kind: StatefulSet + name: glaciation-pool-0 + spec: + containers: + - name: minio + resources: + limits: + cpu: "2" + memory: 4Gi + requests: + cpu: "1" + memory: 2Gi + - name: sidecar + resources: {} + hostname: glaciation-pool-0-0 + initContainers: + - name: validate-arguments + resources: {} + nodeName: glaciation-test-master01 + status: + conditions: + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:54:19Z" + status: "True" + type: Initialized + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:55:54Z" + status: "True" + type: Ready + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:55:54Z" + status: "True" + type: ContainersReady + - lastProbeTime: null + lastTransitionTime: "2024-05-17T11:53:14Z" + status: "True" + type: PodScheduled + containerStatuses: + - containerID: containerd://cd294ba419c33b64d33b24583c0f64ef320560942872cf206750d69a11db7ba6 + lastState: + terminated: + containerID: containerd://930db39b4c91e0ed274fac866713039ed0e12ed8d179bbb39c428d5581555252 + exitCode: 1 + finishedAt: "2024-05-17T11:55:06Z" + reason: Error + startedAt: "2024-05-17T11:55:05Z" + name: minio + ready: true + started: true + state: + running: + startedAt: "2024-05-17T11:55:54Z" + - containerID: containerd://68ecab8b10b91093bfd72f40c93b9b1545f9b5963e5a4b9b0bcea887be36f7fc + lastState: {} + name: sidecar + ready: true + started: true + state: + running: + startedAt: "2024-05-17T11:54:19Z" + initContainerStatuses: + - containerID: containerd://1b107c003c626739a964d4489f8aa7c8eeba5c9df22f7779ca7bd0ea6957ad6d + lastState: {} + name: validate-arguments + ready: true + state: + terminated: + containerID: containerd://1b107c003c626739a964d4489f8aa7c8eeba5c9df22f7779ca7bd0ea6957ad6d + exitCode: 0 + finishedAt: "2024-05-17T11:54:19Z" + reason: Completed + startedAt: "2024-05-17T11:54:19Z" + phase: Running + startTime: "2024-05-17T11:53:14Z" diff --git a/app/core/__fixture__/snapshot/multinode/k8s_replicasets.yaml b/app/core/__fixture__/snapshot/multinode/k8s_replicasets.yaml new file mode 100644 index 0000000..2d0ad77 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_replicasets.yaml @@ -0,0 +1,25 @@ +- apiVersion: apps/v1 + kind: ReplicaSet + metadata: + name: coredns-787d4945fb + namespace: kube-system + ownerReferences: + - apiVersion: apps/v1 + kind: Deployment + name: coredns + spec: + template: + metadata: + labels: + k8s-app: kube-dns + pod-template-hash: 787d4945fb + spec: + containers: + - name: coredns + resources: + limits: + memory: 170Mi + requests: + cpu: 100m + memory: 70Mi + status: diff --git a/app/core/__fixture__/snapshot/multinode/k8s_statefullsets.yaml b/app/core/__fixture__/snapshot/multinode/k8s_statefullsets.yaml new file mode 100644 index 0000000..abccdb5 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_statefullsets.yaml @@ -0,0 +1,30 @@ +- apiVersion: apps/v1 + kind: StatefulSet + metadata: + name: glaciation-pool-0 + namespace: tenant1 + ownerReferences: + - apiVersion: minio.min.io/v2 + kind: Tenant + name: tenant1 + spec: + template: + metadata: + spec: + containers: + - name: minio + resources: + requests: + cpu: '3' + memory: 7Gi + - name: sidecar + resources: + requests: + cpu: '0.5' + memory: 1Gi + initContainers: + - name: validate-arguments + resources: + schedulerName: default-scheduler + volumeClaimTemplates: [] + status: diff --git a/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml 
b/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml new file mode 100644 index 0000000..4ec62d0 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml @@ -0,0 +1,117 @@ +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: test1 + result_parser: SimpleResultParser + value: + metric_id: cpu_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 41.0 +- query: + measurement_id: Usage + subresource: GPU + source: dcgm + unit: coreseconds + property: GPU.Usage + query: test2 + result_parser: SimpleResultParser + value: + metric_id: gpu_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 41.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: test3 + result_parser: SimpleResultParser + value: + metric_id: ram_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 42.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytes + property: Storage.Usage + query: test4 + result_parser: SimpleResultParser + value: + metric_id: eph_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 43.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: test5 + result_parser: SimpleResultParser + value: + metric_id: net_usage + resource_id: glaciation-test-master01 + timestamp: 17100500 + value: 43.0 +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: test6 + result_parser: SimpleResultParser + value: + metric_id: cpu_usage + resource_id: glaciation-test-worker01 + timestamp: 17100500 + value: 41.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: test7 + result_parser: SimpleResultParser + value: + metric_id: ram_usage + resource_id: glaciation-test-worker01 + timestamp: 17100500 + value: 42.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytes + property: Storage.Usage + query: test8 + result_parser: SimpleResultParser + value: + metric_id: eph_usage + resource_id: glaciation-test-worker01 + timestamp: 17100500 + value: 43.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: test9 + result_parser: SimpleResultParser + value: + metric_id: net_usage + resource_id: glaciation-test-worker01 + timestamp: 17100500 + value: 43.0 diff --git a/app/core/__fixture__/snapshot/multinode/metric_pods.yaml b/app/core/__fixture__/snapshot/multinode/metric_pods.yaml new file mode 100644 index 0000000..bc3352d --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/metric_pods.yaml @@ -0,0 +1,212 @@ +# coredns-787d4945fb-l85r5 +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: pods1 + result_parser: SimpleResultParser + value: + metric_id: pod_cpu_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 41.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: pods2 + result_parser: SimpleResultParser + value: + metric_id: pod_ram_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 42.0 +- query: + measurement_id: Usage + subresource: Storage +
source: cAdvisor + unit: bytes + property: Storage.Usage + query: pods3 + result_parser: SimpleResultParser + value: + metric_id: pod_eph_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 43.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: pods4 + result_parser: SimpleResultParser + value: + metric_id: pod_net_usage + resource_id: coredns-787d4945fb-l85r5 + timestamp: 17100500 + value: 44.0 +# kube-flannel-ds-848v8 +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: pods5 + result_parser: SimpleResultParser + value: + metric_id: pod_cpu_usage + resource_id: kube-flannel-ds-848v8 + timestamp: 17100500 + value: 51.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: pods6 + result_parser: SimpleResultParser + value: + metric_id: pod_ram_usage + resource_id: kube-flannel-ds-848v8 + timestamp: 17100500 + value: 52.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytes + property: Storage.Usage + query: pods7 + result_parser: SimpleResultParser + value: + metric_id: pod_eph_usage + resource_id: kube-flannel-ds-848v8 + timestamp: 17100500 + value: 53.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: pods8 + result_parser: SimpleResultParser + value: + metric_id: pod_net_usage + resource_id: kube-flannel-ds-848v8 + timestamp: 17100500 + value: 54.0 +# init-vault-cluster-cbqhq +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: pods9 + result_parser: SimpleResultParser + value: + metric_id: pod_cpu_usage + resource_id: init-vault-cluster-cbqhq + timestamp: 17100500 + value: 61.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: pods10 + result_parser: SimpleResultParser + value: + metric_id: pod_ram_usage + resource_id: init-vault-cluster-cbqhq + timestamp: 17100500 + value: 62.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytes + property: Storage.Usage + query: pods11 + result_parser: SimpleResultParser + value: + metric_id: pod_eph_usage + resource_id: init-vault-cluster-cbqhq + timestamp: 17100500 + value: 63.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: pods12 + result_parser: SimpleResultParser + value: + metric_id: pod_net_usage + resource_id: init-vault-cluster-cbqhq + timestamp: 17100500 + value: 64.0 +# glaciation-pool-0-0 +- query: + measurement_id: Usage + subresource: CPU + source: cAdvisor + unit: coreseconds + property: CPU.Usage + query: pods13 + result_parser: SimpleResultParser + value: + metric_id: pod_cpu_usage + resource_id: glaciation-pool-0-0 + timestamp: 17100500 + value: 71.0 +- query: + measurement_id: Usage + subresource: RAM + source: cAdvisor + unit: bytes + property: RAM.Usage + query: pods14 + result_parser: SimpleResultParser + value: + metric_id: pod_ram_usage + resource_id: glaciation-pool-0-0 + timestamp: 17100500 + value: 72.0 +- query: + measurement_id: Usage + subresource: Storage + source: cAdvisor + unit: bytes + property: Storage.Usage + query: pods15 + result_parser: SimpleResultParser + value: + metric_id: pod_eph_usage + resource_id: 
glaciation-pool-0-0 + timestamp: 17100500 + value: 73.0 +- query: + measurement_id: Usage + subresource: Network + source: cAdvisor + unit: bytes + property: Network.Usage + query: pods16 + result_parser: SimpleResultParser + value: + metric_id: pod_net_usage + resource_id: glaciation-pool-0-0 + timestamp: 17100500 + value: 74.0 diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 70eb094..7c87138 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -23,6 +23,7 @@ class QuerySettings(BaseSettings): class KGBuilderSettings(BaseSettings): builder_tick_seconds: int + node_port: int queries: QuerySettings @@ -51,7 +52,7 @@ def __init__( self.kg_repository = kg_repository self.influxdb_repository = influxdb_repository self.settings = settings - self.slice_strategy = SliceForNodeStrategy() + self.slice_strategy = SliceForNodeStrategy(node_port=settings.node_port) self.slice_assembler = KGSliceAssembler() async def run(self) -> None: diff --git a/app/core/slice_for_node_strategy.py b/app/core/slice_for_node_strategy.py index ad64cf6..72c9789 100644 --- a/app/core/slice_for_node_strategy.py +++ b/app/core/slice_for_node_strategy.py @@ -1,13 +1,113 @@ -from typing import Dict +from typing import Any, Dict, List, Optional, Tuple, TypeAlias + +from jsonpath_ng.ext import parse from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.slice_strategy import SliceStrategy from app.core.types import KGSliceId, MetricSnapshot, SliceInputs +ReferenceKind: TypeAlias = str + class SliceForNodeStrategy(SliceStrategy): + node_port: int + + def __init__(self, node_port: int): + self.node_port = node_port + def get_slices( self, resources: ResourceSnapshot, metrics: MetricSnapshot ) -> Dict[KGSliceId, SliceInputs]: - slice_id = KGSliceId("127.0.0.1", 80) - return {slice_id: SliceInputs(resources, metrics)} + result: Dict[KGSliceId, SliceInputs] = dict() + + for node in resources.nodes: + slice_id, inputs = self.split_node(node, resources, metrics) + result[slice_id] = inputs + + return result + + def split_node( + self, + node: Dict[str, Any], + src_resources: ResourceSnapshot, + src_metrics: MetricSnapshot, + ) -> Tuple[KGSliceId, SliceInputs]: + node_hostname = self.get_resource_name(node) + slice_id = KGSliceId(node_hostname, self.node_port) + + slice_resources = ResourceSnapshot(cluster=src_resources.cluster, nodes=[node]) + slice_metrics = MetricSnapshot() + + self.add_workloads(node_hostname, slice_resources, src_resources) + self.add_metrics(slice_resources, slice_metrics, src_metrics) + + return slice_id, SliceInputs(slice_resources, slice_metrics) + + def get_resource_name(self, resource: Dict[str, Any]) -> str: + for match in parse("$.metadata.name").find(resource): + return str(match.value) + raise Exception("Metadata does not contain name.") + + def add_workloads( + self, + node_hostname: str, + slice_resources: ResourceSnapshot, + src_resources: ResourceSnapshot, + ) -> None: + for pod in src_resources.pods: + hostname = self.get_pod_hostname(pod) + if not hostname or hostname != node_hostname: + continue + slice_resources.pods.append(pod) + self.add_parent_resources(pod, slice_resources, src_resources) + + def add_parent_resources( + self, + resource: Dict[str, Any], + slice_resources: ResourceSnapshot, + src_resources: ResourceSnapshot, + ) -> None: + for parent_kind, parent_identity in self.get_owner_references(resource): + src_found_resources = src_resources.find_resources_by_kind_and_identity( + parent_kind, parent_identity + ) + 
slice_resources.add_resources_by_kind(parent_kind, src_found_resources) + for found_resource in src_found_resources: + self.add_parent_resources( + found_resource, slice_resources, src_resources + ) + + def get_owner_references( + self, resource: Dict[str, Any] + ) -> List[Tuple[ReferenceKind, str]]: + references_match = parse("$.metadata.ownerReferences").find(resource) + if len(references_match) == 0: + return [] + return [ + self.get_reference_id(reference_match) + for reference_match in references_match[0].value + ] + + def get_pod_hostname(self, pod: Dict[str, Any]) -> Optional[str]: + for match in parse("$.spec.nodeName").find(pod): + return str(match.value) + return None + + def get_reference_id(self, reference: Dict[str, Any]) -> Tuple[str, str]: + return reference["kind"], reference["name"] + + def add_metrics( + self, + slice_resources: ResourceSnapshot, + slice_metrics: MetricSnapshot, + src_metrics: MetricSnapshot, + ) -> None: + for node in slice_resources.nodes: + node_name = self.get_resource_name(node) + metrics = src_metrics.get_node_metrics_by_resource(node_name) + slice_metrics.node_metrics.extend(metrics) + + for pod in slice_resources.pods: + pod_name = self.get_resource_name(pod) + metrics = src_metrics.get_pod_metrics_by_resource(pod_name) + slice_metrics.pod_metrics.extend(metrics) diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 42a4cee..186c2ce 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -12,11 +12,12 @@ from app.core.kg_repository import KGRepository from app.core.metric_repository import MetricRepository from app.core.test_graph_fixture import TestGraphFixture +from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import DKGSlice, KGSliceId from app.kg.inmemory_graph import InMemoryGraph -class KGBuilderTest(TestCase, TestGraphFixture): +class KGBuilderTest(TestCase, TestGraphFixture, SnapshotTestBase): client: MockMetadataServiceClient k8s_client: MockK8SClient influxdb_client: MockInfluxDBClient @@ -34,27 +35,40 @@ def setUp(self) -> None: self.running_event.set() self.runner = asyncio.Runner() self.settings = KGBuilderSettings( - builder_tick_seconds=1, queries=QuerySettings() + builder_tick_seconds=1, node_port=80, queries=QuerySettings() ) - def test_build(self) -> None: + def test_build_minimal(self) -> None: + self.mock_minimal_inputs() + builder = self.create_builder() self.runner.run(self.run_builder(builder)) slice = self.wait_for_slice(2) - self.assertEqual(slice.graph, InMemoryGraph()) self.assertEqual(slice.timestamp, 0) - self.assertEqual(slice.slice_id, KGSliceId("127.0.0.1", 80)) + self.assertEqual(slice.slice_id, KGSliceId("glaciation-test-master01", 80)) + self.assertNotEqual(slice.graph, InMemoryGraph()) - def wait_for_slice(self, seconds: int) -> DKGSlice: - start = datetime.datetime.now() - while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): - slice = self.queue.get_nowait() - if slice: - return slice - self.runner.run(asyncio.sleep(0.1)) - raise AssertionError("time is up.") + # TODO validate graph nodes and edges + + def mock_minimal_inputs(self) -> None: + resources = self.load_k8s_snapshot("minimal") + + self.k8s_client.mock_cluster(resources.cluster) + self.k8s_client.mock_daemonsets(resources.daemonsets) + self.k8s_client.mock_deployments(resources.deployments) + self.k8s_client.mock_jobs(resources.jobs) + self.k8s_client.mock_nodes(resources.nodes) + self.k8s_client.mock_pods(resources.pods) + 
self.k8s_client.mock_replicasets(resources.replicasets) + self.k8s_client.mock_statefullsets(resources.statefullsets) + + metrics = self.load_metric_snapshot("minimal") + for query, value in metrics.node_metrics: + self.influxdb_client.mock_query(query.query, [value]) + for query, value in metrics.pod_metrics: + self.influxdb_client.mock_query(query.query, [value]) def create_builder(self) -> KGBuilder: repository = KGRepository(self.client, self.get_jsonld_config()) @@ -68,5 +82,14 @@ def create_builder(self) -> KGBuilder: self.settings, ) + def wait_for_slice(self, seconds: int) -> DKGSlice: + start = datetime.datetime.now() + while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): + slice = self.queue.get_nowait() + if slice: + return slice + self.runner.run(asyncio.sleep(0.1)) + raise AssertionError("time is up.") + async def run_builder(self, builder: KGBuilder) -> None: asyncio.create_task(builder.run()) diff --git a/app/core/test_kgslice_assembler.py b/app/core/test_kgslice_assembler.py index 64da479..8ce9651 100644 --- a/app/core/test_kgslice_assembler.py +++ b/app/core/test_kgslice_assembler.py @@ -1,21 +1,38 @@ from unittest import TestCase -from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.kg_slice_assembler import KGSliceAssembler -from app.core.types import KGSliceId, MetricSnapshot, SliceInputs +from app.core.test_snapshot_base import SnapshotTestBase +from app.core.types import DKGSlice, KGSliceId +from app.kg.inmemory_graph import InMemoryGraph -class KGSliceAssemblerTest(TestCase): +class KGSliceAssemblerTest(TestCase, SnapshotTestBase): def test_assemble_empty(self) -> None: now = 1 slice_id = KGSliceId("127.0.0.1", 80) - resource_snapshot = ResourceSnapshot() - metric_snapshot = MetricSnapshot() - inputs = SliceInputs(resource_snapshot, metric_snapshot) + inputs = self.get_inputs("empty") assembler = KGSliceAssembler() - assembler.assemble( + actual = assembler.assemble( now, slice_id, inputs, ) + self.assertEqual(DKGSlice(slice_id, InMemoryGraph(), now), actual) + + def test_assemble_minimal(self) -> None: + pass + # now = 1 + # slice_id = KGSliceId("127.0.0.1", 80) + # inputs = self.get_inputs("minimal") + # assembler = KGSliceAssembler() + + # actual = assembler.assemble( + # now, + # slice_id, + # inputs, + # ) + # self.assertEqual(DKGSlice(slice_id, InMemoryGraph(), now), actual) + + def test_assemble_multinode(self) -> None: + pass diff --git a/app/core/test_slice_for_node_strategy.py b/app/core/test_slice_for_node_strategy.py index 852cddf..ce0adce 100644 --- a/app/core/test_slice_for_node_strategy.py +++ b/app/core/test_slice_for_node_strategy.py @@ -1,6 +1,118 @@ from unittest import TestCase +from app.core.slice_for_node_strategy import SliceForNodeStrategy +from app.core.test_snapshot_base import SnapshotTestBase +from app.core.types import KGSliceId -class SliceForNodeStrategyTest(TestCase): - def test_slice(self) -> None: - pass + +class SliceForNodeStrategyTest(TestCase, SnapshotTestBase): + def test_split_empty(self) -> None: + strategy = SliceForNodeStrategy(node_port=80) + resources = self.load_k8s_snapshot("empty") + metrics = self.load_metric_snapshot("empty") + actual = strategy.get_slices(resources, metrics) + self.assertEqual({}, actual) + + def test_split_minimal(self) -> None: + strategy = SliceForNodeStrategy(node_port=80) + resources = self.load_k8s_snapshot("minimal") + metrics = self.load_metric_snapshot("minimal") + actual = strategy.get_slices(resources, metrics) + + expected_slice_id = 
KGSliceId(node_ip="glaciation-test-master01", port=80) + self.assertEqual({expected_slice_id}, actual.keys()) + + inputs = actual[expected_slice_id] + actual_names = inputs.resource.get_resource_names() + self.assertEqual( + { + "glaciation-test-master01", + "coredns", + "coredns-787d4945fb", + "coredns-787d4945fb-l85r5", + }, + actual_names, + ) + + actual_metric_names = inputs.metrics.get_metric_names() + self.assertEqual( + { + "eph_usage", + "cpu_usage", + "net_usage", + "ram_usage", + "pod_net_usage", + "pod_cpu_usage", + "pod_ram_usage", + "pod_eph_usage", + }, + actual_metric_names, + ) + + def test_split_multinode(self) -> None: + strategy = SliceForNodeStrategy(node_port=80) + resources = self.load_k8s_snapshot("multinode") + metrics = self.load_metric_snapshot("multinode") + actual = strategy.get_slices(resources, metrics) + + expected_slice_id1 = KGSliceId(node_ip="glaciation-test-master01", port=80) + expected_slice_id2 = KGSliceId(node_ip="glaciation-test-worker01", port=80) + self.assertEqual({expected_slice_id1, expected_slice_id2}, actual.keys()) + + inputs = actual[expected_slice_id1] + actual_names = inputs.resource.get_resource_names() + self.assertEqual( + { + "glaciation-test-master01", + "coredns", + "coredns-787d4945fb", + "coredns-787d4945fb-l85r5", + "glaciation-pool-0-0", + "glaciation-pool-0", + }, + actual_names, + ) + + actual_metric_names = inputs.metrics.get_metric_names() + self.assertEqual( + { + "eph_usage", + "cpu_usage", + "net_usage", + "ram_usage", + "gpu_usage", + "pod_net_usage", + "pod_cpu_usage", + "pod_ram_usage", + "pod_eph_usage", + }, + actual_metric_names, + ) + + inputs = actual[expected_slice_id2] + actual_names = inputs.resource.get_resource_names() + self.assertEqual( + { + "glaciation-test-worker01", + "kube-flannel-ds-848v8", + "kube-flannel-ds", + "init-vault-cluster", + "init-vault-cluster-cbqhq", + }, + actual_names, + ) + + actual_metric_names = inputs.metrics.get_metric_names() + self.assertEqual( + { + "eph_usage", + "cpu_usage", + "net_usage", + "ram_usage", + "pod_net_usage", + "pod_cpu_usage", + "pod_ram_usage", + "pod_eph_usage", + }, + actual_metric_names, + ) diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py new file mode 100644 index 0000000..8049c14 --- /dev/null +++ b/app/core/test_snapshot_base.py @@ -0,0 +1,73 @@ +from typing import Any, Dict, List, Tuple + +import dataclasses +import os.path +from io import FileIO + +import yaml + +from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.metric_repository import MetricQuery +from app.core.metric_value import MetricValue +from app.core.types import MetricSnapshot, SliceInputs + + +class SnapshotTestBase: + SNAPSHOT_ROOT: str = "app/core/__fixture__/snapshot" + + def get_inputs(self, identity: str) -> SliceInputs: + resource_snapshot = self.load_k8s_snapshot(identity) + metric_snapshot = self.load_metric_snapshot(identity) + inputs = SliceInputs(resource_snapshot, metric_snapshot) + return inputs + + def load_k8s_snapshot(self, snapshot_id: str) -> ResourceSnapshot: + return ResourceSnapshot( + cluster=self.load_yaml(snapshot_id, "k8s_cluster"), # type: ignore + pods=self.load_yaml(snapshot_id, "k8s_pods"), + nodes=self.load_yaml(snapshot_id, "k8s_nodes"), + deployments=self.load_yaml(snapshot_id, "k8s_deployments"), + jobs=self.load_yaml(snapshot_id, "k8s_jobs"), + statefullsets=self.load_yaml(snapshot_id, "k8s_statefullsets"), + daemonsets=self.load_yaml(snapshot_id, "k8s_daemonsets"), + replicasets=self.load_yaml(snapshot_id, 
"k8s_replicasets"), + ) + + def load_metric_snapshot(self, snapshot_id: str) -> MetricSnapshot: + return MetricSnapshot( + pod_metrics=self.load_metrics(snapshot_id, "metric_pods"), + node_metrics=self.load_metrics(snapshot_id, "metric_nodes"), + ) + + def load_yaml(self, snapshot_id: str, file_id: str) -> List[Dict[str, Any]]: + file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/{file_id}.yaml" + if not os.path.exists(file_path): + return [] + return self.safe_load_yaml(file_path) # type: ignore + + def load_metrics( + self, snapshot_id: str, file_id: str + ) -> List[Tuple[MetricQuery, MetricValue]]: + file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/{file_id}.yaml" + if not os.path.exists(file_path): + return [] + query_and_values: List[Dict[str, Any]] = self.safe_load_yaml(file_path) + result = [] + for query_and_value in query_and_values: + query = self.dataclass_from_dict(MetricQuery, query_and_value["query"]) + value = self.dataclass_from_dict(MetricValue, query_and_value["value"]) + result.append((query, value)) + return result + + def safe_load_yaml(self, file_path: str) -> Any: + with FileIO(file_path) as f: + return yaml.safe_load(f) + + def dataclass_from_dict(self, klass, d): + try: + fieldtypes = {f.name: f.type for f in dataclasses.fields(klass)} + return klass( + **{f: self.dataclass_from_dict(fieldtypes[f], d[f]) for f in d} + ) + except Exception: + return d # Not a dataclass field diff --git a/app/core/types.py b/app/core/types.py index 3054707..631ea16 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -1,4 +1,4 @@ -from typing import List, Tuple +from typing import List, Set, Tuple from dataclasses import dataclass, field @@ -40,10 +40,35 @@ class Metric: class MetricSnapshot: pod_metrics: List[Tuple[MetricQuery, MetricValue]] = field(default_factory=list) node_metrics: List[Tuple[MetricQuery, MetricValue]] = field(default_factory=list) + # TODO remove workload_metrics workload_metrics: List[Tuple[MetricQuery, MetricValue]] = field( default_factory=list ) + def get_metric_names(self) -> Set[str]: + names: Set[str] = set() + names = {*names, *{resource[1].metric_id for resource in self.pod_metrics}} + names = {*names, *{resource[1].metric_id for resource in self.node_metrics}} + return names + + def get_node_metrics_by_resource( + self, resource_name: str + ) -> List[Tuple[MetricQuery, MetricValue]]: + return [ + metric + for metric in self.node_metrics + if metric[1].resource_id == resource_name + ] + + def get_pod_metrics_by_resource( + self, resource_name: str + ) -> List[Tuple[MetricQuery, MetricValue]]: + return [ + metric + for metric in self.pod_metrics + if metric[1].resource_id == resource_name + ] + @dataclass class SliceInputs: diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index 80b11d1..f83561b 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -47,7 +47,9 @@ def get_settings(self) -> KGExporterSettings: ) def parse(self): - parser = argparse.ArgumentParser(description="Kubernetes watcher service") + parser = argparse.ArgumentParser( + description="Kubernetes knowledge graph exporter." 
+ ) parser.add_argument( "--config", dest="config", diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 76283a3..98dd6db 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -23,7 +23,9 @@ def test_start(self): influxdb_client = MockInfluxDBClient() jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) settings = KGExporterSettings( - builder=KGBuilderSettings(builder_tick_seconds=1, queries=QuerySettings()), + builder=KGBuilderSettings( + builder_tick_seconds=1, node_port=80, queries=QuerySettings() + ), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( url="test", token="token", org="org", timeout=60000 diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index 7071c20..cd16825 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -14,7 +14,9 @@ class PyDanticYamlTest(TestCase): def test_dump_load_settings(self): expected = KGExporterSettings( - builder=KGBuilderSettings(builder_tick_seconds=1, queries=QuerySettings()), + builder=KGBuilderSettings( + builder_tick_seconds=1, node_port=80, queries=QuerySettings() + ), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( url="test", token="token", org="org", timeout=60000 From ef706251a1b47863d61e55537ce059c99c981cd2 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 13 Jun 2024 13:40:04 +0200 Subject: [PATCH 24/61] HHT-669: Slicing Strategy tests --- app/kgexporter_context_builder.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index f83561b..176df61 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -38,7 +38,9 @@ def build(self) -> KGExporterContext: def get_settings(self) -> KGExporterSettings: return KGExporterSettings( - builder=KGBuilderSettings(builder_tick_seconds=1, queries=QuerySettings()), + builder=KGBuilderSettings( + builder_tick_seconds=1, node_port=80, queries=QuerySettings() + ), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( url="test", token="token", org="org", timeout=60000 From 83263004d7aa28420707559279e5a157f6b455eb Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 13 Jun 2024 13:49:17 +0200 Subject: [PATCH 25/61] HHT-669: KG assembler tests --- .../graph_glaciation-test-master01_80.jsonld | 1 + .../snapshot/minimal/metric_nodes.yaml | 2 +- app/core/kg_slice_assembler.py | 12 +++++ app/core/test_kgslice_assembler.py | 34 +++++++------- app/core/test_snapshot_base.py | 44 ++++++++++++++++++- app/kg/iri.py | 3 ++ 6 files changed, 78 insertions(+), 18 deletions(-) create mode 100644 app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld diff --git a/app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld @@ -0,0 +1 @@ +{} diff --git a/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml b/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml index 1a2c876..8ac25ba 100644 --- a/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml +++ b/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml @@ -28,7 +28,7 @@ measurement_id: Usage subresource: Storage source: cAdvisor - unit: bytesß + unit: bytes property: Storage.Usage query: test3 result_parser: SimpleResultParser 
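For orientation, the slicing strategy from the previous patch and the assembler changed below are meant to be driven together: the strategy partitions one cluster-wide snapshot into per-node inputs, and the assembler turns each input into a DKGSlice. A minimal sketch of that glue, using only the calls exercised by the tests in this series; the helper function assemble_all itself is illustrative and not part of the patch.

    # Illustrative glue only; SliceForNodeStrategy, KGSliceAssembler and their
    # call signatures are taken from the tests in this patch series.
    from app.core.kg_slice_assembler import KGSliceAssembler
    from app.core.slice_for_node_strategy import SliceForNodeStrategy

    def assemble_all(now, resources, metrics, node_port=80):
        strategy = SliceForNodeStrategy(node_port=node_port)
        assembler = KGSliceAssembler()
        slices = []
        # One KGSliceId -> SliceInputs entry per node in the resource snapshot.
        for slice_id, inputs in strategy.get_slices(resources, metrics).items():
            # Each call returns a DKGSlice(slice_id, graph, timestamp).
            slices.append(assembler.assemble(now, slice_id, inputs))
        return slices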
diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py index 3a8caf4..3790add 100644 --- a/app/core/kg_slice_assembler.py +++ b/app/core/kg_slice_assembler.py @@ -2,6 +2,7 @@ from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.types import DKGSlice, KGSliceId, MetricSnapshot, SliceInputs +from app.k8s_transform.cluster_transformer import ClusterToRDFTransformer from app.k8s_transform.node_transformer import NodesToRDFTransformer from app.k8s_transform.pod_transformer import PodToRDFTransformer from app.k8s_transform.transformation_context import TransformationContext @@ -32,6 +33,7 @@ def transform_resources( self, now: int, snapshot: ResourceSnapshot, sink: Graph ) -> None: context = TransformationContext(now) + self.transform_cluster(sink, snapshot.nodes, snapshot.cluster, context) self.transform_resource(sink, snapshot.nodes, context, NodesToRDFTransformer) self.transform_resource(sink, snapshot.pods, context, PodToRDFTransformer) self.transform_resource( @@ -48,6 +50,16 @@ def transform_resources( sink, snapshot.statefullsets, context, WorkloadToRDFTransformer ) + def transform_cluster( + self, + sink: Graph, + nodes: List[Dict[str, Any]], + cluster_info: Dict[str, Any], + context: TransformationContext, + ) -> None: + transformer = ClusterToRDFTransformer(cluster_info, nodes, sink) + transformer.transform(context) + def transform_resource( self, sink: Graph, diff --git a/app/core/test_kgslice_assembler.py b/app/core/test_kgslice_assembler.py index 8ce9651..fac902b 100644 --- a/app/core/test_kgslice_assembler.py +++ b/app/core/test_kgslice_assembler.py @@ -1,9 +1,12 @@ +from typing import List, Set + from unittest import TestCase from app.core.kg_slice_assembler import KGSliceAssembler from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import DKGSlice, KGSliceId from app.kg.inmemory_graph import InMemoryGraph +from app.kg.iri import IRI class KGSliceAssemblerTest(TestCase, SnapshotTestBase): @@ -21,18 +24,19 @@ def test_assemble_empty(self) -> None: self.assertEqual(DKGSlice(slice_id, InMemoryGraph(), now), actual) def test_assemble_minimal(self) -> None: - pass - # now = 1 - # slice_id = KGSliceId("127.0.0.1", 80) - # inputs = self.get_inputs("minimal") - # assembler = KGSliceAssembler() - - # actual = assembler.assemble( - # now, - # slice_id, - # inputs, - # ) - # self.assertEqual(DKGSlice(slice_id, InMemoryGraph(), now), actual) - - def test_assemble_multinode(self) -> None: - pass + now = 1 + slice_id = KGSliceId("glaciation-test-master01", 80) + inputs = self.get_inputs("minimal") + assembler = KGSliceAssembler() + + actual = assembler.assemble( + now, + slice_id, + inputs, + ) + self.assertEqual(slice_id, actual.slice_id) + self.assertEqual(now, actual.timestamp) + self.assert_graph(actual.graph, "minimal", slice_id) + + def to_value(self, ids: List[IRI]) -> Set[str]: + return {iri.get_value() for iri in ids} diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 8049c14..19f71a7 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -1,15 +1,22 @@ from typing import Any, Dict, List, Tuple import dataclasses +import json import os.path -from io import FileIO +from io import FileIO, StringIO import yaml from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.metric_repository import MetricQuery from app.core.metric_value import MetricValue -from app.core.types import MetricSnapshot, SliceInputs +from app.core.types import KGSliceId, 
MetricSnapshot, SliceInputs +from app.k8s_transform.upper_ontology_base import UpperOntologyBase +from app.kg.graph import Graph +from app.kg.id_base import IdBase +from app.kg.iri import IRI +from app.serialize.jsonld_configuration import JsonLDConfiguration +from app.serialize.jsonld_serializer import JsonLDSerialializer class SnapshotTestBase: @@ -63,6 +70,10 @@ def safe_load_yaml(self, file_path: str) -> Any: with FileIO(file_path) as f: return yaml.safe_load(f) + def load_json(self, file_path: str) -> Dict[str, Any]: + with FileIO(file_path) as f: + return json.load(f) # type: ignore + def dataclass_from_dict(self, klass, d): try: fieldtypes = {f.name: f.type for f in dataclasses.fields(klass)} @@ -71,3 +82,32 @@ def dataclass_from_dict(self, klass, d): ) except Exception: return d # Not a dataclass field + + def assert_graph(self, graph: Graph, snapshot_id: str, slice_id: KGSliceId) -> None: + file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/graph_{slice_id.node_ip}_{slice_id.port}.jsonld" + node_jsonld = self.load_json(file_path) + + buffer = StringIO() + JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) + + self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) # type: ignore + + def get_jsonld_config(self) -> JsonLDConfiguration: + contexts: Dict[IdBase, Dict[str, Any]] = { + JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + } + } + return JsonLDConfiguration( + contexts, + { + IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasuringResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), + }, + ) diff --git a/app/kg/iri.py b/app/kg/iri.py index cde495b..0f0cb80 100644 --- a/app/kg/iri.py +++ b/app/kg/iri.py @@ -73,3 +73,6 @@ def is_string_type(self) -> bool: def get_prefix(self) -> Optional[str]: return self.prefix + + def get_value(self) -> str: + return self.value From b83b4a3d8a136b2641fee56303f63e0f1f8e7aee Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 13 Jun 2024 14:11:33 +0200 Subject: [PATCH 26/61] HHT-669: Transformers add common entities by demand --- app/core/__fixture__/simple_node.jsonld | 3 +- app/core/test_graph_fixture.py | 7 - .../__fixture__/deployment.jsonld | 130 --------------- .../__fixture__/deployment.turtle | 10 ++ .../__fixture__/master_node.jsonld | 114 ------------- .../__fixture__/master_node.turtle | 17 ++ app/k8s_transform/__fixture__/pod1.jsonld | 155 ------------------ app/k8s_transform/__fixture__/pod2.jsonld | 155 ------------------ app/k8s_transform/__fixture__/pod3.jsonld | 155 ------------------ .../__fixture__/replicaset.jsonld | 140 ---------------- .../__fixture__/replicaset.turtle | 6 + .../__fixture__/statefulset.jsonld | 140 ---------------- .../__fixture__/statefulset.turtle | 6 + .../__fixture__/worker_node.jsonld | 124 -------------- .../__fixture__/worker_node.turtle | 13 ++ app/k8s_transform/test_node_transformer.py | 1 - app/k8s_transform/test_pod_transformer.py | 1 - .../test_workload_transformer.py | 1 - app/k8s_transform/upper_ontology_base.py | 63 ++++--- app/kg/graph.py | 4 + app/kg/inmemory_graph.py | 3 + app/metric_transform/__fixture__/node.jsonld | 6 +- 
app/metric_transform/__fixture__/node.turtle | 2 +- app/metric_transform/__fixture__/pod1.jsonld | 6 +- app/metric_transform/__fixture__/pod1.turtle | 2 +- .../node_metric_transformer.py | 3 - .../pod_metric_transformer.py | 3 - 27 files changed, 105 insertions(+), 1165 deletions(-) diff --git a/app/core/__fixture__/simple_node.jsonld b/app/core/__fixture__/simple_node.jsonld index 30cae3a..7cfc9d4 100644 --- a/app/core/__fixture__/simple_node.jsonld +++ b/app/core/__fixture__/simple_node.jsonld @@ -14,13 +14,12 @@ { "@id": "glc:Core", "@type": "glc:MeasurementUnit", - "glc:hasDescription": "Cores", "glc:hasID": "glc:Core" }, { "@id": "glc:ResourceSpecification", "@type": "glc:MeasuringResource", - "glc:hasDescription": "Test", + "glc:hasDescription": "ResourceSpecification", "glc:hasID": "glc:ResourceSpecification", "glc:makes": { "@id": "glc:measurement1", diff --git a/app/core/test_graph_fixture.py b/app/core/test_graph_fixture.py index 319278b..cc3a83a 100644 --- a/app/core/test_graph_fixture.py +++ b/app/core/test_graph_fixture.py @@ -27,13 +27,6 @@ def simple_node(self) -> Tuple[Graph, str]: transformer.add_work_producing_resource(node1, "Node") transformer.add_work_producing_resource(cpu, "CPU") transformer.add_subresource(node1, cpu) - transformer.add_unit(UpperOntologyBase.UNIT_CPU_CORE_ID, "Cores") - transformer.add_measurement_property( - UpperOntologyBase.PROPERTY_CPU_CAPACITY, None - ) - transformer.add_measuring_resource( - UpperOntologyBase.MEASURING_RESOURCE_NODE_K8S_SPEC_ID, "Test" - ) transformer.add_measurement( measurement_id, "CPU.MAX", diff --git a/app/k8s_transform/__fixture__/deployment.jsonld b/app/k8s_transform/__fixture__/deployment.jsonld index 595ac51..cb878ea 100644 --- a/app/k8s_transform/__fixture__/deployment.jsonld +++ b/app/k8s_transform/__fixture__/deployment.jsonld @@ -11,104 +11,16 @@ "@type": "glc:MeasurementUnit", "glc:hasID": "glc:Bytes" }, - { - "@id": "glc:CPU.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:CPU.Available" - }, - { - "@id": "glc:CPU.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:CPU.Capacity" - }, - { - "@id": "glc:CPU.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:CPU.Usage" - }, { "@id": "glc:Core", "@type": "glc:MeasurementUnit", "glc:hasID": "glc:Core" }, - { - "@id": "glc:Energy.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Energy.Available" - }, - { - "@id": "glc:Energy.Index", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Energy.Index" - }, - { - "@id": "glc:Energy.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Energy.Usage" - }, - { - "@id": "glc:EnergyBenchmark", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "EnergyBenchmark", - "glc:hasID": "glc:EnergyBenchmark" - }, - { - "@id": "glc:GB", - "@type": "glc:MeasurementUnit", - "glc:hasID": "glc:GB" - }, - { - "@id": "glc:GPU.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:GPU.Available" - }, - { - "@id": "glc:GPU.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:GPU.Capacity" - }, - { - "@id": "glc:GPU.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:GPU.Usage" - }, - { - "@id": "glc:Kepler", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "Kepler metrics https://sustainable-computing.io/", - "glc:hasID": "glc:Kepler" - }, - { - "@id": "glc:MB", - "@type": "glc:MeasurementUnit", - "glc:hasID": "glc:MB" - }, { "@id": "glc:Milliwatt", "@type": "glc:MeasurementUnit", 
"glc:hasID": "glc:Milliwatt" }, - { - "@id": "glc:Network.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Network.Available" - }, - { - "@id": "glc:Network.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Network.Capacity" - }, - { - "@id": "glc:Network.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Network.Usage" - }, - { - "@id": "glc:NodeExporter", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "NodeExporter", - "glc:hasID": "glc:NodeExporter" - }, { "@id": "glc:Performance", "@type": "glc:Aspect", @@ -119,48 +31,6 @@ "@type": "glc:Aspect", "glc:hasID": "glc:Power" }, - { - "@id": "glc:RAM.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:RAM.Available" - }, - { - "@id": "glc:RAM.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:RAM.Capacity" - }, - { - "@id": "glc:RAM.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:RAM.Usage" - }, - { - "@id": "glc:ResourceSpecification", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "ResourceSpecification", - "glc:hasID": "glc:ResourceSpecification" - }, - { - "@id": "glc:Storage.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Storage.Available" - }, - { - "@id": "glc:Storage.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Storage.Capacity" - }, - { - "@id": "glc:Storage.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Storage.Usage" - }, - { - "@id": "glc:cAdvisor", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", - "glc:hasID": "glc:cAdvisor" - }, { "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb", "@type": "glc:AssignedTask", diff --git a/app/k8s_transform/__fixture__/deployment.turtle b/app/k8s_transform/__fixture__/deployment.turtle index e93c2f7..b1317cf 100644 --- a/app/k8s_transform/__fixture__/deployment.turtle +++ b/app/k8s_transform/__fixture__/deployment.turtle @@ -65,6 +65,16 @@ cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated glc:measu cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd rdf:type glc:AssignedTask . cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd glc:hasDescription "CRD" . cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd glc:makes cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb . +glc:Bytes rdf:type glc:MeasurementUnit . +glc:Bytes glc:hasID glc:Bytes . +glc:Core rdf:type glc:MeasurementUnit . +glc:Core glc:hasID glc:Core . +glc:Milliwatt rdf:type glc:MeasurementUnit . +glc:Milliwatt glc:hasID glc:Milliwatt . +glc:Performance rdf:type glc:Aspect . +glc:Performance glc:hasID glc:Performance . +glc:Power rdf:type glc:Aspect . +glc:Power glc:hasID glc:Power . glc:default-scheduler rdf:type glc:Scheduler . glc:default-scheduler glc:assigns cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb . glc:default-scheduler glc:hasID glc:default-scheduler . 
diff --git a/app/k8s_transform/__fixture__/master_node.jsonld b/app/k8s_transform/__fixture__/master_node.jsonld index b441bc3..dc23a8c 100644 --- a/app/k8s_transform/__fixture__/master_node.jsonld +++ b/app/k8s_transform/__fixture__/master_node.jsonld @@ -103,129 +103,31 @@ "@type": "glc:MeasurementUnit", "glc:hasID": "glc:Bytes" }, - { - "@id": "glc:CPU.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:CPU.Available" - }, { "@id": "glc:CPU.Capacity", "@type": "glc:MeasurementProperty", "glc:hasID": "glc:CPU.Capacity" }, - { - "@id": "glc:CPU.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:CPU.Usage" - }, { "@id": "glc:Core", "@type": "glc:MeasurementUnit", "glc:hasID": "glc:Core" }, - { - "@id": "glc:Energy.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Energy.Available" - }, { "@id": "glc:Energy.Index", "@type": "glc:MeasurementProperty", "glc:hasID": "glc:Energy.Index" }, - { - "@id": "glc:Energy.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Energy.Usage" - }, - { - "@id": "glc:EnergyBenchmark", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "EnergyBenchmark", - "glc:hasID": "glc:EnergyBenchmark" - }, - { - "@id": "glc:GB", - "@type": "glc:MeasurementUnit", - "glc:hasID": "glc:GB" - }, - { - "@id": "glc:GPU.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:GPU.Available" - }, - { - "@id": "glc:GPU.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:GPU.Capacity" - }, - { - "@id": "glc:GPU.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:GPU.Usage" - }, - { - "@id": "glc:Kepler", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "Kepler metrics https://sustainable-computing.io/", - "glc:hasID": "glc:Kepler" - }, - { - "@id": "glc:MB", - "@type": "glc:MeasurementUnit", - "glc:hasID": "glc:MB" - }, { "@id": "glc:Milliwatt", "@type": "glc:MeasurementUnit", "glc:hasID": "glc:Milliwatt" }, - { - "@id": "glc:Network.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Network.Available" - }, - { - "@id": "glc:Network.Capacity", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Network.Capacity" - }, - { - "@id": "glc:Network.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Network.Usage" - }, - { - "@id": "glc:NodeExporter", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "NodeExporter", - "glc:hasID": "glc:NodeExporter" - }, - { - "@id": "glc:Performance", - "@type": "glc:Aspect", - "glc:hasID": "glc:Performance" - }, - { - "@id": "glc:Power", - "@type": "glc:Aspect", - "glc:hasID": "glc:Power" - }, - { - "@id": "glc:RAM.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:RAM.Available" - }, { "@id": "glc:RAM.Capacity", "@type": "glc:MeasurementProperty", "glc:hasID": "glc:RAM.Capacity" }, - { - "@id": "glc:RAM.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:RAM.Usage" - }, { "@id": "glc:ResourceSpecification", "@type": "glc:MeasuringResource", @@ -240,26 +142,10 @@ ] } }, - { - "@id": "glc:Storage.Available", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Storage.Available" - }, { "@id": "glc:Storage.Capacity", "@type": "glc:MeasurementProperty", "glc:hasID": "glc:Storage.Capacity" - }, - { - "@id": "glc:Storage.Usage", - "@type": "glc:MeasurementProperty", - "glc:hasID": "glc:Storage.Usage" - }, - { - "@id": "glc:cAdvisor", - "@type": "glc:MeasuringResource", - "glc:hasDescription": "cAdvisor metrics 
https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", - "glc:hasID": "glc:cAdvisor" } ] } diff --git a/app/k8s_transform/__fixture__/master_node.turtle b/app/k8s_transform/__fixture__/master_node.turtle index 86ed596..e211969 100644 --- a/app/k8s_transform/__fixture__/master_node.turtle +++ b/app/k8s_transform/__fixture__/master_node.turtle @@ -54,4 +54,21 @@ cluster:glaciation-test-master01.Storage.Capacity glc:hasValue 47266578354^^ None: graph = InMemoryGraph() context = TransformationContext(123) transformer = NodesToRDFTransformer(node_json, graph) - transformer.add_common_entities() transformer.transform(context) JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) diff --git a/app/k8s_transform/test_pod_transformer.py b/app/k8s_transform/test_pod_transformer.py index 26f75a7..a764d90 100644 --- a/app/k8s_transform/test_pod_transformer.py +++ b/app/k8s_transform/test_pod_transformer.py @@ -42,7 +42,6 @@ def transform_jsonld(self, file_id: str) -> None: graph = InMemoryGraph() context = TransformationContext(123) transformer = PodToRDFTransformer(node_json, graph) - transformer.add_common_entities() transformer.transform(context) JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) diff --git a/app/k8s_transform/test_workload_transformer.py b/app/k8s_transform/test_workload_transformer.py index 0ccaddd..07bfe82 100644 --- a/app/k8s_transform/test_workload_transformer.py +++ b/app/k8s_transform/test_workload_transformer.py @@ -42,7 +42,6 @@ def transform_jsonld(self, file_id: str) -> None: graph = InMemoryGraph() context = TransformationContext(123) transformer = WorkloadToRDFTransformer(node_json, graph) - transformer.add_common_entities() transformer.transform(context) JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) diff --git a/app/k8s_transform/upper_ontology_base.py b/app/k8s_transform/upper_ontology_base.py index 458b09b..54e352f 100644 --- a/app/k8s_transform/upper_ontology_base.py +++ b/app/k8s_transform/upper_ontology_base.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Dict, List, Optional from app.kg.graph import Graph from app.kg.iri import IRI @@ -55,6 +55,14 @@ class UpperOntologyBase: MEASURING_RESOURCE_NODE_CADVISOR_ID = IRI(GLACIATION_PREFIX, "cAdvisor") MEASURING_RESOURCE_NODE_ENERGY_BENCHMARK = IRI(GLACIATION_PREFIX, "EnergyBenchmark") + MEASURING_RESOURCE_DESCRIPTIONS: Dict[IRI, str] = { + MEASURING_RESOURCE_KEPLER_ID: "Kepler metrics https://sustainable-computing.io/", + MEASURING_RESOURCE_NODE_EXPORTER_ID: "NodeExporter", + MEASURING_RESOURCE_NODE_K8S_SPEC_ID: "ResourceSpecification", + MEASURING_RESOURCE_NODE_CADVISOR_ID: "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", # noqa: E501 + MEASURING_RESOURCE_NODE_ENERGY_BENCHMARK: "EnergyBenchmark", + } + # Units UNIT_CPU_CORE_ID = IRI(GLACIATION_PREFIX, "Core") UNIT_GB_ID = IRI(GLACIATION_PREFIX, "GB") @@ -89,12 +97,19 @@ class UpperOntologyBase: units: List[IRI] aspects: List[IRI] properties: List[IRI] + measuring_resources: List[IRI] sink: Graph def __init__(self, sink: Graph): self.sink = sink - + self.measuring_resources = [ + self.MEASURING_RESOURCE_KEPLER_ID, + self.MEASURING_RESOURCE_NODE_EXPORTER_ID, + self.MEASURING_RESOURCE_NODE_K8S_SPEC_ID, + self.MEASURING_RESOURCE_NODE_CADVISOR_ID, + 
self.MEASURING_RESOURCE_NODE_ENERGY_BENCHMARK, + ] self.units = [ self.UNIT_CPU_CORE_ID, self.UNIT_GB_ID, @@ -164,6 +179,16 @@ def add_measurement( measuring_resource: IRI, ) -> None: self.add_common_info(identifier, self.MEASUREMENT, description) + + if not self.sink.has_node(unit): + self.add_unit(unit, None) + + if not self.sink.has_node(related_to_property): + self.add_measurement_property(related_to_property, None) + + if not self.sink.has_node(measuring_resource): + self.add_measuring_resource(measuring_resource) + self.sink.add_property( identifier, self.HAS_VALUE, @@ -182,9 +207,6 @@ def add_measurement( self.sink.add_relation(identifier, self.MEASURED_IN, unit) self.sink.add_relation(measuring_resource, self.MAKES, identifier) - def add_measuring_resource(self, identifier: IRI, description: str) -> None: - self.add_common_info(identifier, self.MEASURING_RESOURCE, description) - def add_unit(self, identifier: IRI, description: Optional[str]) -> None: self.add_common_info(identifier, self.MEASUREMENT_UNIT, description) @@ -197,6 +219,12 @@ def add_constraint( unit: IRI, aspect: IRI, ) -> None: + if not self.sink.has_node(unit): + self.add_unit(unit, None) + + if not self.sink.has_node(aspect): + self.add_aspect(aspect, None) + if is_soft_constraint: self.add_common_info(identifier, self.SOFT_CONSTRAINT, description) else: @@ -244,23 +272,8 @@ def add_common_info( ) def add_common_entities(self) -> None: - self.add_measuring_resource( - self.MEASURING_RESOURCE_KEPLER_ID, - "Kepler metrics https://sustainable-computing.io/", - ) - self.add_measuring_resource( - self.MEASURING_RESOURCE_NODE_EXPORTER_ID, "NodeExporter" - ) - self.add_measuring_resource( - self.MEASURING_RESOURCE_NODE_K8S_SPEC_ID, "ResourceSpecification" - ) - self.add_measuring_resource( - self.MEASURING_RESOURCE_NODE_CADVISOR_ID, - """cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md""", # noqa: E501 - ) - self.add_measuring_resource( - self.MEASURING_RESOURCE_NODE_ENERGY_BENCHMARK, "EnergyBenchmark" - ) + for measuring_resource in self.measuring_resources: + self.add_measuring_resource(measuring_resource) for unit in self.units: self.add_unit(unit, None) @@ -270,3 +283,9 @@ def add_common_entities(self) -> None: for property in self.properties: self.add_measurement_property(property, None) + + def add_measuring_resource(self, measuring_resource_id: IRI) -> None: + description = self.MEASURING_RESOURCE_DESCRIPTIONS.get(measuring_resource_id) + self.add_common_info( + measuring_resource_id, self.MEASURING_RESOURCE, description + ) diff --git a/app/kg/graph.py b/app/kg/graph.py index 097c166..f570b1f 100644 --- a/app/kg/graph.py +++ b/app/kg/graph.py @@ -38,6 +38,10 @@ def add_relation_collection( ) -> None: raise NotImplementedError + @abstractmethod + def has_node(self, node_id: IRI) -> bool: + raise NotImplementedError + @abstractmethod def get_ids(self) -> Set[IRI]: raise NotImplementedError diff --git a/app/kg/inmemory_graph.py b/app/kg/inmemory_graph.py index 7474b9c..f075288 100644 --- a/app/kg/inmemory_graph.py +++ b/app/kg/inmemory_graph.py @@ -128,6 +128,9 @@ def add_relation_collection( self.get_or_add_node(object_id) edge.add_object_id(object_id) + def has_node(self, node_id: IRI) -> bool: + return node_id in self.nodes + def get_ids(self) -> Set[IRI]: return set(self.nodes.keys()) diff --git a/app/metric_transform/__fixture__/node.jsonld b/app/metric_transform/__fixture__/node.jsonld index 722e380..96bb868 100644 --- a/app/metric_transform/__fixture__/node.jsonld +++ 
b/app/metric_transform/__fixture__/node.jsonld @@ -49,11 +49,7 @@ { "@id": "glc:cAdvisor", "@type": "glc:MeasuringResource", - "glc:hasDescription": { - "@set": [ - "cAdvisor" - ] - }, + "glc:hasDescription": "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", "glc:hasID": "glc:cAdvisor", "glc:makes": { "@set": [ diff --git a/app/metric_transform/__fixture__/node.turtle b/app/metric_transform/__fixture__/node.turtle index 30729b5..2daf68c 100644 --- a/app/metric_transform/__fixture__/node.turtle +++ b/app/metric_transform/__fixture__/node.turtle @@ -23,7 +23,7 @@ glc:CPU.Usage glc:hasID glc:CPU.Usage . glc:Energy.Usage rdf:type glc:MeasurementProperty . glc:Energy.Usage glc:hasID glc:Energy.Usage . glc:cAdvisor rdf:type glc:MeasuringResource . -glc:cAdvisor glc:hasDescription ("cAdvisor") . +glc:cAdvisor glc:hasDescription "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md" . glc:cAdvisor glc:hasID glc:cAdvisor . glc:cAdvisor glc:makes (cluster:worker1.CPU.Usage cluster:worker1.Energy.Usage) . glc:coreseconds rdf:type glc:MeasurementUnit . diff --git a/app/metric_transform/__fixture__/pod1.jsonld b/app/metric_transform/__fixture__/pod1.jsonld index 60db92a..4573cfb 100644 --- a/app/metric_transform/__fixture__/pod1.jsonld +++ b/app/metric_transform/__fixture__/pod1.jsonld @@ -48,11 +48,7 @@ { "@id": "glc:cAdvisor", "@type": "glc:MeasuringResource", - "glc:hasDescription": { - "@set": [ - "cAdvisor" - ] - }, + "glc:hasDescription": "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", "glc:hasID": "glc:cAdvisor", "glc:makes": { "@set": [ diff --git a/app/metric_transform/__fixture__/pod1.turtle b/app/metric_transform/__fixture__/pod1.turtle index 5e4c1e4..933f78b 100644 --- a/app/metric_transform/__fixture__/pod1.turtle +++ b/app/metric_transform/__fixture__/pod1.turtle @@ -20,7 +20,7 @@ glc:CPU.Usage glc:hasID glc:CPU.Usage . glc:Energy.Usage rdf:type glc:MeasurementProperty . glc:Energy.Usage glc:hasID glc:Energy.Usage . glc:cAdvisor rdf:type glc:MeasuringResource . -glc:cAdvisor glc:hasDescription ("cAdvisor") . +glc:cAdvisor glc:hasDescription "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md" . glc:cAdvisor glc:hasID glc:cAdvisor . glc:cAdvisor glc:makes (cluster:pod1.CPU.Usage cluster:pod1.Energy.Usage) . glc:coreseconds rdf:type glc:MeasurementUnit . 
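With on-demand registration in place, the metric transformers below can drop their explicit add_measurement_property / add_unit / add_measuring_resource calls: the IRIs they derive from each MetricQuery are materialised by add_measurement on first use. A sketch of that derivation, mirroring the existing IRI(...) pattern in the transformers; the helper derive_measurement_iris is illustrative only.

    # Illustrative helper; mirrors how the metric transformers build IRIs from a
    # MetricQuery (fields: property, unit, source), e.g. glc:CPU.Usage,
    # glc:coreseconds, glc:cAdvisor in the fixtures above.
    from app.k8s_transform.upper_ontology_base import UpperOntologyBase
    from app.kg.iri import IRI

    def derive_measurement_iris(query):
        prefix = UpperOntologyBase.GLACIATION_PREFIX
        property_id = IRI(prefix, query.property)
        unit_id = IRI(prefix, query.unit)
        source_id = IRI(prefix, query.source)
        # add_measurement(...) now registers any of these that are not yet in
        # the graph, so the transformers no longer pre-register them.
        return property_id, unit_id, source_id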
diff --git a/app/metric_transform/node_metric_transformer.py b/app/metric_transform/node_metric_transformer.py index f5f4a3c..e7c20b2 100644 --- a/app/metric_transform/node_metric_transformer.py +++ b/app/metric_transform/node_metric_transformer.py @@ -25,9 +25,6 @@ def transform(self, context: TransformationContext) -> None: property_id = IRI(self.GLACIATION_PREFIX, query.property) unit_id = IRI(self.GLACIATION_PREFIX, query.unit) source_id = IRI(self.GLACIATION_PREFIX, query.source) - self.add_measurement_property(property_id, None) - self.add_unit(unit_id, None) - self.add_measuring_resource(source_id, query.source) self.add_work_producing_resource(parent_resource_id, None) self.add_measurement( measurement_id, diff --git a/app/metric_transform/pod_metric_transformer.py b/app/metric_transform/pod_metric_transformer.py index d7a5641..c47414c 100644 --- a/app/metric_transform/pod_metric_transformer.py +++ b/app/metric_transform/pod_metric_transformer.py @@ -22,9 +22,6 @@ def transform(self, context: TransformationContext) -> None: property_id = IRI(self.GLACIATION_PREFIX, query.property) unit_id = IRI(self.GLACIATION_PREFIX, query.unit) source_id = IRI(self.GLACIATION_PREFIX, query.source) - self.add_measurement_property(property_id, None) - self.add_unit(unit_id, None) - self.add_measuring_resource(source_id, query.source) self.add_work_producing_resource(pod_id, None) self.add_measurement( measurement_id, From 7ef60424d562acf6041014fe038e4d6fc9066792 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 13 Jun 2024 14:39:56 +0200 Subject: [PATCH 27/61] HHT-669: KG Slice assembler and tests --- .../snapshot/empty/slice_127.0.0.1_80.jsonld | 16 + .../graph_glaciation-test-master01_80.jsonld | 1 - .../slice_glaciation-test-master01_80.jsonld | 513 +++ app/core/test_kgslice_assembler.py | 10 +- .../__fixture__/cluster.nodes.json | 2751 ++++++++--------- app/k8s_transform/cluster_transformer.py | 23 +- 6 files changed, 1921 insertions(+), 1393 deletions(-) create mode 100644 app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld delete mode 100644 app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld create mode 100644 app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld diff --git a/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld b/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld new file mode 100644 index 0000000..c6854b3 --- /dev/null +++ b/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld @@ -0,0 +1,16 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "cluster:Unknown", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesCluster", + "glc:hasID": "cluster:Unknown" + } + ] +} diff --git a/app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld deleted file mode 100644 index 0967ef4..0000000 --- a/app/core/__fixture__/snapshot/minimal/graph_glaciation-test-master01_80.jsonld +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld new file mode 100644 index 0000000..74faf27 --- /dev/null +++ 
b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld @@ -0,0 +1,513 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "cluster:coredns-787d4945fb-l85r5", + "@type": "glc:WorkProducingResource", + "glc:hasID": "cluster:coredns-787d4945fb-l85r5", + "glc:hasMeasurement": { + "@id": "cluster:coredns-787d4945fb-l85r5.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": { + "@set": [ + "Usage" + ] + }, + "glc:hasTimestamp": { + "@set": [ + 17100500 + ] + }, + "glc:hasValue": { + "@set": [ + 41.0, + 42.0, + 43.0 + ] + }, + "glc:hasID": "cluster:coredns-787d4945fb-l85r5.Usage", + "glc:measuredIn": { + "@set": [ + "glc:bytes", + "glc:coreseconds" + ] + }, + "glc:relatesToMeasurementProperty": { + "@set": [ + "glc:CPU.Usage", + "glc:Network.Usage", + "glc:RAM.Usage", + "glc:Storage.Usage" + ] + } + } + }, + { + "@id": "cluster:glaciation-test-master01", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": { + "@set": [ + "KubernetesWorkerNode" + ] + }, + "glc:hasID": "cluster:glaciation-test-master01", + "glc:hasMeasurement": { + "@id": "cluster:glaciation-test-master01.Energy.Index", + "@type": "glc:Measurement", + "glc:hasDescription": "Energy.Index", + "glc:hasTimestamp": 1, + "glc:hasValue": 1001, + "glc:hasID": "cluster:glaciation-test-master01.Energy.Index", + "glc:measuredIn": "glc:Milliwatt", + "glc:relatesToMeasurementProperty": "glc:Energy.Index" + }, + "glc:hasStatus": { + "@id": "cluster:glaciation-test-master01.Status", + "@type": "glc:Status", + "glc:hasDescription": "Ready", + "glc:startTime": "2024-02-13T13:53:43Z", + "glc:hasID": "cluster:glaciation-test-master01.Status" + }, + "glc:hasSubResource": { + "@set": [ + "cluster:glaciation-test-master01.CPU", + "cluster:glaciation-test-master01.GPU", + "cluster:glaciation-test-master01.Network", + "cluster:glaciation-test-master01.RAM", + "cluster:glaciation-test-master01.Storage" + ] + } + }, + { + "@id": "cluster:glaciation-test-master01.CPU", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "CPU", + "glc:hasID": "cluster:glaciation-test-master01.CPU", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-master01.CPU.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity CPU", + "glc:hasTimestamp": 1, + "glc:hasValue": 4, + "glc:hasID": "cluster:glaciation-test-master01.CPU.Capacity", + "glc:measuredIn": "glc:Core", + "glc:relatesToMeasurementProperty": "glc:CPU.Capacity" + }, + { + "@id": "cluster:glaciation-test-master01.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 41.0, + "glc:hasID": "cluster:glaciation-test-master01.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + } + ] + } + }, + { + "@id": "cluster:glaciation-test-master01.GPU", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "GPU", + "glc:hasID": "cluster:glaciation-test-master01.GPU" + }, + { + "@id": "cluster:glaciation-test-master01.Network", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Network", + "glc:hasID": "cluster:glaciation-test-master01.Network", + "glc:hasMeasurement": { + "@id": "cluster:glaciation-test-master01.Network.Usage", + "@type": "glc:Measurement", + 
"glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:glaciation-test-master01.Network.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + } + }, + { + "@id": "cluster:glaciation-test-master01.RAM", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "RAM", + "glc:hasID": "cluster:glaciation-test-master01.RAM", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-master01.RAM.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity", + "glc:hasTimestamp": 1, + "glc:hasValue": 16673939456, + "glc:hasID": "cluster:glaciation-test-master01.RAM.Capacity", + "glc:measuredIn": "glc:Bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Capacity" + }, + { + "@id": "cluster:glaciation-test-master01.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:glaciation-test-master01.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + } + ] + } + }, + { + "@id": "cluster:glaciation-test-master01.Storage", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "EphemeralStorage", + "glc:hasID": "cluster:glaciation-test-master01.Storage", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-master01.Storage.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity", + "glc:hasTimestamp": 1, + "glc:hasValue": 47266578354, + "glc:hasID": "cluster:glaciation-test-master01.Storage.Capacity", + "glc:measuredIn": "glc:Bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Capacity" + }, + { + "@id": "cluster:glaciation-test-master01.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:glaciation-test-master01.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + } + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Pod", + "k8s:hasName": "coredns-787d4945fb-l85r5", + "glc:consumes": "cluster:glaciation-test-master01", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5", + "glc:hasSubResource": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://0ce09d1e8fdff70a58902bb3e73efafa76035dddbe9cec8b4115ac80d09f9963", + "k8s:hasContainerName": "coredns", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" + }, + { + "@id": "cluster:kubernetes", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesCluster", + "glc:hasID": "cluster:kubernetes", + "glc:hasSubResource": "cluster:glaciation-test-master01" + }, + { + "@id": "glc:Bytes", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Bytes" + }, + { + "@id": "glc:CPU.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Capacity" + }, + { + "@id": "glc:CPU.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Usage" + }, + { + "@id": "glc:Core", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Core" + }, + { + "@id": "glc:Energy.Index", + "@type": "glc:MeasurementProperty", + "glc:hasID": 
"glc:Energy.Index" + }, + { + "@id": "glc:Milliwatt", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Milliwatt" + }, + { + "@id": "glc:Network.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Network.Usage" + }, + { + "@id": "glc:Performance", + "@type": "glc:Aspect", + "glc:hasID": "glc:Performance" + }, + { + "@id": "glc:Power", + "@type": "glc:Aspect", + "glc:hasID": "glc:Power" + }, + { + "@id": "glc:RAM.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:RAM.Capacity" + }, + { + "@id": "glc:RAM.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:RAM.Usage" + }, + { + "@id": "glc:ResourceSpecification", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "ResourceSpecification", + "glc:hasID": "glc:ResourceSpecification", + "glc:makes": { + "@set": [ + "cluster:glaciation-test-master01.CPU.Capacity", + "cluster:glaciation-test-master01.Energy.Index", + "cluster:glaciation-test-master01.RAM.Capacity", + "cluster:glaciation-test-master01.Storage.Capacity" + ] + } + }, + { + "@id": "glc:Storage.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Storage.Capacity" + }, + { + "@id": "glc:Storage.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Storage.Usage" + }, + { + "@id": "glc:bytes", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:bytes" + }, + { + "@id": "glc:cAdvisor", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", + "glc:hasID": "glc:cAdvisor", + "glc:makes": { + "@set": [ + "cluster:coredns-787d4945fb-l85r5.Usage", + "cluster:glaciation-test-master01.CPU.Usage", + "cluster:glaciation-test-master01.Network.Usage", + "cluster:glaciation-test-master01.RAM.Usage", + "cluster:glaciation-test-master01.Storage.Usage" + ] + } + }, + { + "@id": "glc:coreseconds", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:coreseconds" + }, + { + "@id": "cluster:coredns-787d4945fb.1", + "@type": "glc:AssignedTask", + "glc:hasDescription": "ReplicaSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:coredns-787d4945fb.1.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.1, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.1.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns-787d4945fb.1.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 73400320.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.1.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns-787d4945fb.1.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 178257920.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.1.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:coredns-787d4945fb.1" + }, + { + "@id": "cluster:coredns-787d4945fb.None", + "@type": "glc:AssignedTask", + "glc:hasDescription": "ReplicaSet", + "glc:makes": "cluster:kube-system.coredns-787d4945fb-l85r5" + }, + { + "@id": "cluster:coredns.1", + "@type": "glc:AssignedTask", + "glc:hasDescription": "Deployment", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:coredns.1.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.30000000000000004, 
+ "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns.1.Energy.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "Energy.Allocated", + "glc:maxValue": 100.0, + "glc:hasAspect": "glc:Power", + "glc:hasID": "cluster:coredns.1.Energy.Allocated", + "glc:measuredIn": "glc:Milliwatt" + }, + { + "@id": "cluster:coredns.1.Energy.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "Energy.Capacity", + "glc:maxValue": 100.0, + "glc:hasAspect": "glc:Power", + "glc:hasID": "cluster:coredns.1.Energy.Capacity", + "glc:measuredIn": "glc:Milliwatt" + }, + { + "@id": "cluster:coredns.1.GPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "GPU.Allocated", + "glc:maxValue": 101.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.GPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns.1.GPU.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "GPU.Capacity", + "glc:maxValue": 101.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.GPU.Capacity", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns.1.Network.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "Network.Allocated", + "glc:maxValue": 1010.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.Network.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.1.Network.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "Network.Capacity", + "glc:maxValue": 1010.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.Network.Capacity", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.1.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 220200960.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.1.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 534773760.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.1.Storage.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "Storage.Allocated", + "glc:maxValue": 100500.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.1.Storage.Allocated", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:coredns.1" + }, + { + "@id": "cluster:coredns.None", + "@type": "glc:AssignedTask", + "glc:hasDescription": "Deployment", + "glc:hasSubTask": "cluster:coredns-787d4945fb.1" + }, + { + "@id": "cluster:crd-resource.None", + "@type": "glc:AssignedTask", + "glc:hasDescription": "CRD", + "glc:makes": "cluster:coredns.1" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.Status", + "@type": "glc:Status", + "glc:hasDescription": "Running", + "glc:startTime": "2024-02-13T13:53:43Z", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.Status" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns.Status", + "@type": "glc:Status", + "glc:hasDescription": "running", + "glc:startTime": "2024-02-13T13:53:44Z", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns.Status" + }, + { + "@id": "glc:default-scheduler", + "@type": "glc:Scheduler", + 
"glc:assigns": { + "@set": [ + "cluster:coredns-787d4945fb.1", + "cluster:coredns.1" + ] + }, + "glc:hasID": "glc:default-scheduler", + "glc:manages": { + "@set": [ + "cluster:kube-system.coredns-787d4945fb-l85r5", + "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" + ] + } + } + ] +} diff --git a/app/core/test_kgslice_assembler.py b/app/core/test_kgslice_assembler.py index fac902b..4fafebe 100644 --- a/app/core/test_kgslice_assembler.py +++ b/app/core/test_kgslice_assembler.py @@ -4,12 +4,14 @@ from app.core.kg_slice_assembler import KGSliceAssembler from app.core.test_snapshot_base import SnapshotTestBase -from app.core.types import DKGSlice, KGSliceId -from app.kg.inmemory_graph import InMemoryGraph +from app.core.types import KGSliceId from app.kg.iri import IRI class KGSliceAssemblerTest(TestCase, SnapshotTestBase): + def setUp(self): + self.maxDiff = None + def test_assemble_empty(self) -> None: now = 1 slice_id = KGSliceId("127.0.0.1", 80) @@ -21,7 +23,9 @@ def test_assemble_empty(self) -> None: slice_id, inputs, ) - self.assertEqual(DKGSlice(slice_id, InMemoryGraph(), now), actual) + self.assertEqual(slice_id, actual.slice_id) + self.assertEqual(now, actual.timestamp) + self.assert_graph(actual.graph, "empty", slice_id) def test_assemble_minimal(self) -> None: now = 1 diff --git a/app/k8s_transform/__fixture__/cluster.nodes.json b/app/k8s_transform/__fixture__/cluster.nodes.json index 754998f..304d592 100644 --- a/app/k8s_transform/__fixture__/cluster.nodes.json +++ b/app/k8s_transform/__fixture__/cluster.nodes.json @@ -1,1405 +1,1398 @@ -{ - "apiVersion": "v1", - "items": [ - { - "apiVersion": "v1", - "kind": "Node", - "metadata": { - "annotations": { - "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-mast01\"}", - "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"92:de:31:02:33:e5\"}", - "flannel.alpha.coreos.com/backend-type": "vxlan", - "flannel.alpha.coreos.com/kube-subnet-manager": "true", - "flannel.alpha.coreos.com/public-ip": "10.14.2.5", - "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", - "node.alpha.kubernetes.io/ttl": "0", - "volumes.kubernetes.io/controller-managed-attach-detach": "true" - }, - "creationTimestamp": "2023-10-19T23:21:56Z", - "labels": { - "beta.kubernetes.io/arch": "amd64", - "beta.kubernetes.io/os": "linux", - "directpv.min.io/identity": "directpv-min-io", - "directpv.min.io/node": "glaciation-mast01", - "directpv.min.io/rack": "default", - "directpv.min.io/region": "default", - "directpv.min.io/zone": "default", - "kubernetes.io/arch": "amd64", - "kubernetes.io/hostname": "glaciation-mast01", - "kubernetes.io/os": "linux", - "node-role.kubernetes.io/control-plane": "", - "node.kubernetes.io/exclude-from-external-load-balancers": "" - }, - "name": "glaciation-mast01", - "resourceVersion": "19338976", - "uid": "6619e599-9171-4de9-ae0b-f349d8a92c8e" +[ + { + "apiVersion": "v1", + "kind": "Node", + "metadata": { + "annotations": { + "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-mast01\"}", + "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"92:de:31:02:33:e5\"}", + "flannel.alpha.coreos.com/backend-type": "vxlan", + "flannel.alpha.coreos.com/kube-subnet-manager": "true", + "flannel.alpha.coreos.com/public-ip": "10.14.2.5", + "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", + "node.alpha.kubernetes.io/ttl": "0", + "volumes.kubernetes.io/controller-managed-attach-detach": "true" }, - "spec": { - 
"podCIDR": "10.244.0.0/24", - "podCIDRs": [ - "10.244.0.0/24" - ] + "creationTimestamp": "2023-10-19T23:21:56Z", + "labels": { + "beta.kubernetes.io/arch": "amd64", + "beta.kubernetes.io/os": "linux", + "directpv.min.io/identity": "directpv-min-io", + "directpv.min.io/node": "glaciation-mast01", + "directpv.min.io/rack": "default", + "directpv.min.io/region": "default", + "directpv.min.io/zone": "default", + "kubernetes.io/arch": "amd64", + "kubernetes.io/hostname": "glaciation-mast01", + "kubernetes.io/os": "linux", + "node-role.kubernetes.io/control-plane": "", + "node.kubernetes.io/exclude-from-external-load-balancers": "" }, - "status": { - "addresses": [ - { - "address": "10.14.2.5", - "type": "InternalIP" - }, - { - "address": "glaciation-mast01", - "type": "Hostname" - } - ], - "allocatable": { - "cpu": "128", - "ephemeral-storage": "1699178737718", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "263490004Ki", - "pods": "110" - }, - "capacity": { - "cpu": "128", - "ephemeral-storage": "1843726932Ki", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "263592404Ki", - "pods": "110" - }, - "conditions": [ - { - "lastHeartbeatTime": "2024-02-29T12:17:33Z", - "lastTransitionTime": "2024-02-29T12:17:33Z", - "message": "Flannel is running on this node", - "reason": "FlannelIsUp", - "status": "False", - "type": "NetworkUnavailable" - }, - { - "lastHeartbeatTime": "2024-03-01T14:17:08Z", - "lastTransitionTime": "2023-10-19T23:21:55Z", - "message": "kubelet has sufficient memory available", - "reason": "KubeletHasSufficientMemory", - "status": "False", - "type": "MemoryPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:17:08Z", - "lastTransitionTime": "2023-10-19T23:21:55Z", - "message": "kubelet has no disk pressure", - "reason": "KubeletHasNoDiskPressure", - "status": "False", - "type": "DiskPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:17:08Z", - "lastTransitionTime": "2023-10-19T23:21:55Z", - "message": "kubelet has sufficient PID available", - "reason": "KubeletHasSufficientPID", - "status": "False", - "type": "PIDPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:17:08Z", - "lastTransitionTime": "2024-02-16T14:32:45Z", - "message": "kubelet is posting ready status. 
AppArmor enabled", - "reason": "KubeletReady", - "status": "True", - "type": "Ready" - } - ], - "daemonEndpoints": { - "kubeletEndpoint": { - "Port": 10250 - } - }, - "images": [ - { - "names": [ - "docker.elastic.co/elasticsearch/elasticsearch@sha256:d784066422aec9f66ae424f692d2416057e78853ab015915a04530570c955cc8", - "docker.elastic.co/elasticsearch/elasticsearch:8.5.1" - ], - "sizeBytes": 683984297 - }, - { - "names": [ - "docker.io/pournima2024/excel-image-name@sha256:3a3a64ccba4f7ee7fd96bf529aee8ace1c9ddf7a74e0db66540f32000d08b288", - "docker.io/pournima2024/excel-image-name:tag" - ], - "sizeBytes": 377376998 - }, - { - "names": [ - "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", - "docker.io/secoresearch/fuseki:latest" - ], - "sizeBytes": 338692664 - }, - { - "names": [ - "docker.io/grafana/grafana@sha256:e3e9c2b5776fe3657f4954dfa91579224f98a0316f51d431989b15425e95530f", - "docker.io/grafana/grafana:latest" - ], - "sizeBytes": 111048170 - }, - { - "names": [ - "docker.io/grafana/grafana@sha256:0679e877ba204cede473782d5aba962831a3449092da120aba7d24082efe3fde", - "docker.io/grafana/grafana:10.1.5" - ], - "sizeBytes": 109599242 - }, - { - "names": [ - "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", - "docker.io/parklize/fuseki:v1" - ], - "sizeBytes": 105267079 - }, - { - "names": [ - "registry.k8s.io/etcd@sha256:e013d0d5e4e25d00c61a7ff839927a1f36479678f11e49502b53a5e0b14f10c3", - "registry.k8s.io/etcd:3.5.9-0" - ], - "sizeBytes": 102894559 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:bc1794e85c9e00293351b967efa267ce6af1c824ac875a9d0c7ac84700a8b53e", - "docker.io/prom/prometheus:latest" - ], - "sizeBytes": 101487261 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:beb5e30ffba08d9ae8a7961b9a2145fc8af6296ff2a4f463df7cd722fcbfc789" - ], - "sizeBytes": 99657259 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:a67e5e402ff5410b86ec48b39eab1a3c4df2a7e78a71bf025ec5e32e09090ad4" - ], - "sizeBytes": 98721042 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:b440bc0e8aa5bab44a782952c09516b6a50f9d7b2325c1ffafac7bc833298e2e" - ], - "sizeBytes": 98718836 - }, - { - "names": [ - "quay.io/prometheus/prometheus@sha256:089b3beab1304d83280c589a81f6f72ca42006910ff903ea3cf25f97fddc49ea", - "quay.io/prometheus/prometheus:v2.47.1" - ], - "sizeBytes": 98167945 - }, - { - "names": [ - "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", - "quay.io/minio/directpv:v4.0.10" - ], - "sizeBytes": 81808104 - }, - { - "names": [ - "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", - "quay.io/sustainable_computing_io/kepler:release-0.7.2" - ], - "sizeBytes": 81436291 - }, - { - "names": [ - "quay.io/minio/operator@sha256:bf19ad5a3ba1bd2951e582cd13891073c5314d0d1d5c27fdca3a5ec85bbc7920", - "quay.io/minio/operator:v5.0.12" - ], - "sizeBytes": 37579913 - }, - { - "names": [ - "registry.k8s.io/kube-apiserver@sha256:6beea2e5531a0606613594fd3ed92d71bbdcef99dd3237522049a0b32cad736c", - "registry.k8s.io/kube-apiserver:v1.28.2" - ], - "sizeBytes": 34662976 - }, - { - "names": [ - "registry.k8s.io/kube-controller-manager@sha256:6a42ce14d716205a99763f3c732c0a8f0ea041bdbbea7d2dfffcc53dafd7cac4", - "registry.k8s.io/kube-controller-manager:v1.28.2" - ], - "sizeBytes": 33395782 - }, - { - "names": [ - 
"quay.io/prometheus/alertmanager@sha256:361db356b33041437517f1cd298462055580585f26555c317df1a3caf2868552", - "quay.io/prometheus/alertmanager:v0.26.0" - ], - "sizeBytes": 31252082 - }, - { - "names": [ - "quay.io/minio/csi-provisioner@sha256:7b5c070ec70d30b0895d91b10c39a0e6cc81c18e0d1566c77aeff2a3587fa316" - ], - "sizeBytes": 28202105 - }, - { - "names": [ - "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", - "docker.io/flannel/flannel:v0.22.3" - ], - "sizeBytes": 27017673 - }, - { - "names": [ - "quay.io/minio/csi-resizer@sha256:819f68a4daf75acec336302843f303cf360d4941249f9f5019ffbb690c8ac7c0" - ], - "sizeBytes": 26524686 - }, - { - "names": [ - "quay.io/kiwigrid/k8s-sidecar@sha256:415d07ee1027c3ff7af9e26e05e03ffd0ec0ccf9f619ac00ab24366efe4343bd", - "quay.io/kiwigrid/k8s-sidecar:1.25.1" - ], - "sizeBytes": 25110109 - }, - { - "names": [ - "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", - "registry.k8s.io/kube-proxy:v1.28.2" - ], - "sizeBytes": 24558871 - }, - { - "names": [ - "registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:4d99688e557396f5baa150e019ff7d5b7334f9b9f9a8dab64038c5c2a006f6b5", - "registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20221220-controller-v1.5.1-58-g787ea74b6" - ], - "sizeBytes": 19687037 - }, - { - "names": [ - "registry.k8s.io/metrics-server/metrics-server@sha256:1c0419326500f1704af580d12a579671b2c3a06a8aa918cd61d0a35fb2d6b3ce", - "registry.k8s.io/metrics-server/metrics-server:v0.7.0" - ], - "sizeBytes": 19435023 - }, - { - "names": [ - "registry.k8s.io/kube-scheduler@sha256:6511193f8114a2f011790619698efe12a8119ed9a17e2e36f4c1c759ccf173ab", - "registry.k8s.io/kube-scheduler:v1.28.2" - ], - "sizeBytes": 18811134 - }, - { - "names": [ - "quay.io/prometheus-operator/prometheus-operator@sha256:70f02a0fd9181a4e1afd5135e2dbb5d4264f7828df1d567b4d83a3eab63deb01", - "quay.io/prometheus-operator/prometheus-operator:v0.68.0" - ], - "sizeBytes": 16473146 - }, - { - "names": [ - "registry.k8s.io/coredns/coredns@sha256:a0ead06651cf580044aeb0a0feba63591858fb2e43ade8c9dea45a6a89ae7e5e", - "registry.k8s.io/coredns/coredns:v1.10.1" - ], - "sizeBytes": 16190758 - }, - { - "names": [ - "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", - "docker.io/mrlhansen/idrac_exporter:c9f9bfe" - ], - "sizeBytes": 15698654 - }, - { - "names": [ - "quay.io/prometheus-operator/prometheus-config-reloader@sha256:2eada80dc7264dd53fcfa67ca86f8a398a07112cce7017eb44d6574483801356", - "quay.io/prometheus-operator/prometheus-config-reloader:v0.68.0" - ], - "sizeBytes": 13755925 - }, - { - "names": [ - "registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:af8220f534938de121a694cb7314313a6195c9d494fc30bfa6885b08a276bb82", - "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.10.1" - ], - "sizeBytes": 13464332 - }, - { - "names": [ - "registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:ec5d6f6be2280212da0e55572b67d76f29c0342b3b6d24cc47b32d80ee3374a4", - "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.10.0" - ], - "sizeBytes": 13454920 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", - "docker.io/prom/node-exporter:latest" - ], - "sizeBytes": 11777109 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - 
"quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter:v1.6.1" - ], - "sizeBytes": 11725464 - }, - { - "names": [ - "quay.io/prometheus/pushgateway@sha256:979a69ab4a4016c89f2b1c53dacaf6190cd676c9d55f7659aabdd208ba48b7c7", - "quay.io/prometheus/pushgateway:v1.6.2" - ], - "sizeBytes": 11593933 - }, - { - "names": [ - "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" - ], - "sizeBytes": 10387407 - }, - { - "names": [ - "docker.io/prom/snmp-exporter@sha256:9b0914d7bd409cd97d5a123752b35fdc634998101ac1c51ee3cf1d3e5c8a88b3", - "docker.io/prom/snmp-exporter:v0.21.0" - ], - "sizeBytes": 9814054 - }, - { - "names": [ - "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" - ], - "sizeBytes": 9430791 - }, - { - "names": [ - "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", - "docker.io/flannel/flannel-cni-plugin:v1.2.0" - ], - "sizeBytes": 3879095 - }, - { - "names": [ - "registry.k8s.io/pause@sha256:7031c1b283388d2c2e09b57badb803c05ebed362dc88d84b480cc47f72a21097", - "registry.k8s.io/pause:3.9" - ], - "sizeBytes": 321520 - }, - { - "names": [ - "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", - "registry.k8s.io/pause:3.8" - ], - "sizeBytes": 311286 - } - ], - "nodeInfo": { - "architecture": "amd64", - "bootID": "0d7b618d-f565-400b-8f50-34d991baf17e", - "containerRuntimeVersion": "containerd://1.7.2", - "kernelVersion": "5.15.0-97-generic", - "kubeProxyVersion": "v1.28.2", - "kubeletVersion": "v1.28.2", - "machineID": "01f492f5179144dbaf18a56d18110114", - "operatingSystem": "linux", - "osImage": "Ubuntu 22.04.3 LTS", - "systemUUID": "4c4c4544-0036-5010-8039-b4c04f4b5a33" + "name": "glaciation-mast01", + "resourceVersion": "19338976", + "uid": "6619e599-9171-4de9-ae0b-f349d8a92c8e" + }, + "spec": { + "podCIDR": "10.244.0.0/24", + "podCIDRs": [ + "10.244.0.0/24" + ] + }, + "status": { + "addresses": [ + { + "address": "10.14.2.5", + "type": "InternalIP" + }, + { + "address": "glaciation-mast01", + "type": "Hostname" + } + ], + "allocatable": { + "cpu": "128", + "ephemeral-storage": "1699178737718", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "263490004Ki", + "pods": "110" + }, + "capacity": { + "cpu": "128", + "ephemeral-storage": "1843726932Ki", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "263592404Ki", + "pods": "110" + }, + "conditions": [ + { + "lastHeartbeatTime": "2024-02-29T12:17:33Z", + "lastTransitionTime": "2024-02-29T12:17:33Z", + "message": "Flannel is running on this node", + "reason": "FlannelIsUp", + "status": "False", + "type": "NetworkUnavailable" + }, + { + "lastHeartbeatTime": "2024-03-01T14:17:08Z", + "lastTransitionTime": "2023-10-19T23:21:55Z", + "message": "kubelet has sufficient memory available", + "reason": "KubeletHasSufficientMemory", + "status": "False", + "type": "MemoryPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:17:08Z", + "lastTransitionTime": "2023-10-19T23:21:55Z", + "message": "kubelet has no disk pressure", + "reason": "KubeletHasNoDiskPressure", + "status": "False", + "type": "DiskPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:17:08Z", + "lastTransitionTime": "2023-10-19T23:21:55Z", + "message": "kubelet has sufficient PID available", + "reason": "KubeletHasSufficientPID", + "status": "False", + "type": 
"PIDPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:17:08Z", + "lastTransitionTime": "2024-02-16T14:32:45Z", + "message": "kubelet is posting ready status. AppArmor enabled", + "reason": "KubeletReady", + "status": "True", + "type": "Ready" + } + ], + "daemonEndpoints": { + "kubeletEndpoint": { + "Port": 10250 + } + }, + "images": [ + { + "names": [ + "docker.elastic.co/elasticsearch/elasticsearch@sha256:d784066422aec9f66ae424f692d2416057e78853ab015915a04530570c955cc8", + "docker.elastic.co/elasticsearch/elasticsearch:8.5.1" + ], + "sizeBytes": 683984297 + }, + { + "names": [ + "docker.io/pournima2024/excel-image-name@sha256:3a3a64ccba4f7ee7fd96bf529aee8ace1c9ddf7a74e0db66540f32000d08b288", + "docker.io/pournima2024/excel-image-name:tag" + ], + "sizeBytes": 377376998 + }, + { + "names": [ + "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", + "docker.io/secoresearch/fuseki:latest" + ], + "sizeBytes": 338692664 + }, + { + "names": [ + "docker.io/grafana/grafana@sha256:e3e9c2b5776fe3657f4954dfa91579224f98a0316f51d431989b15425e95530f", + "docker.io/grafana/grafana:latest" + ], + "sizeBytes": 111048170 + }, + { + "names": [ + "docker.io/grafana/grafana@sha256:0679e877ba204cede473782d5aba962831a3449092da120aba7d24082efe3fde", + "docker.io/grafana/grafana:10.1.5" + ], + "sizeBytes": 109599242 + }, + { + "names": [ + "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", + "docker.io/parklize/fuseki:v1" + ], + "sizeBytes": 105267079 + }, + { + "names": [ + "registry.k8s.io/etcd@sha256:e013d0d5e4e25d00c61a7ff839927a1f36479678f11e49502b53a5e0b14f10c3", + "registry.k8s.io/etcd:3.5.9-0" + ], + "sizeBytes": 102894559 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:bc1794e85c9e00293351b967efa267ce6af1c824ac875a9d0c7ac84700a8b53e", + "docker.io/prom/prometheus:latest" + ], + "sizeBytes": 101487261 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:beb5e30ffba08d9ae8a7961b9a2145fc8af6296ff2a4f463df7cd722fcbfc789" + ], + "sizeBytes": 99657259 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:a67e5e402ff5410b86ec48b39eab1a3c4df2a7e78a71bf025ec5e32e09090ad4" + ], + "sizeBytes": 98721042 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:b440bc0e8aa5bab44a782952c09516b6a50f9d7b2325c1ffafac7bc833298e2e" + ], + "sizeBytes": 98718836 + }, + { + "names": [ + "quay.io/prometheus/prometheus@sha256:089b3beab1304d83280c589a81f6f72ca42006910ff903ea3cf25f97fddc49ea", + "quay.io/prometheus/prometheus:v2.47.1" + ], + "sizeBytes": 98167945 + }, + { + "names": [ + "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", + "quay.io/minio/directpv:v4.0.10" + ], + "sizeBytes": 81808104 + }, + { + "names": [ + "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", + "quay.io/sustainable_computing_io/kepler:release-0.7.2" + ], + "sizeBytes": 81436291 + }, + { + "names": [ + "quay.io/minio/operator@sha256:bf19ad5a3ba1bd2951e582cd13891073c5314d0d1d5c27fdca3a5ec85bbc7920", + "quay.io/minio/operator:v5.0.12" + ], + "sizeBytes": 37579913 + }, + { + "names": [ + "registry.k8s.io/kube-apiserver@sha256:6beea2e5531a0606613594fd3ed92d71bbdcef99dd3237522049a0b32cad736c", + "registry.k8s.io/kube-apiserver:v1.28.2" + ], + "sizeBytes": 34662976 + }, + { + "names": [ + "registry.k8s.io/kube-controller-manager@sha256:6a42ce14d716205a99763f3c732c0a8f0ea041bdbbea7d2dfffcc53dafd7cac4", + 
"registry.k8s.io/kube-controller-manager:v1.28.2" + ], + "sizeBytes": 33395782 + }, + { + "names": [ + "quay.io/prometheus/alertmanager@sha256:361db356b33041437517f1cd298462055580585f26555c317df1a3caf2868552", + "quay.io/prometheus/alertmanager:v0.26.0" + ], + "sizeBytes": 31252082 + }, + { + "names": [ + "quay.io/minio/csi-provisioner@sha256:7b5c070ec70d30b0895d91b10c39a0e6cc81c18e0d1566c77aeff2a3587fa316" + ], + "sizeBytes": 28202105 + }, + { + "names": [ + "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", + "docker.io/flannel/flannel:v0.22.3" + ], + "sizeBytes": 27017673 + }, + { + "names": [ + "quay.io/minio/csi-resizer@sha256:819f68a4daf75acec336302843f303cf360d4941249f9f5019ffbb690c8ac7c0" + ], + "sizeBytes": 26524686 + }, + { + "names": [ + "quay.io/kiwigrid/k8s-sidecar@sha256:415d07ee1027c3ff7af9e26e05e03ffd0ec0ccf9f619ac00ab24366efe4343bd", + "quay.io/kiwigrid/k8s-sidecar:1.25.1" + ], + "sizeBytes": 25110109 + }, + { + "names": [ + "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", + "registry.k8s.io/kube-proxy:v1.28.2" + ], + "sizeBytes": 24558871 + }, + { + "names": [ + "registry.k8s.io/ingress-nginx/kube-webhook-certgen@sha256:4d99688e557396f5baa150e019ff7d5b7334f9b9f9a8dab64038c5c2a006f6b5", + "registry.k8s.io/ingress-nginx/kube-webhook-certgen:v20221220-controller-v1.5.1-58-g787ea74b6" + ], + "sizeBytes": 19687037 + }, + { + "names": [ + "registry.k8s.io/metrics-server/metrics-server@sha256:1c0419326500f1704af580d12a579671b2c3a06a8aa918cd61d0a35fb2d6b3ce", + "registry.k8s.io/metrics-server/metrics-server:v0.7.0" + ], + "sizeBytes": 19435023 + }, + { + "names": [ + "registry.k8s.io/kube-scheduler@sha256:6511193f8114a2f011790619698efe12a8119ed9a17e2e36f4c1c759ccf173ab", + "registry.k8s.io/kube-scheduler:v1.28.2" + ], + "sizeBytes": 18811134 + }, + { + "names": [ + "quay.io/prometheus-operator/prometheus-operator@sha256:70f02a0fd9181a4e1afd5135e2dbb5d4264f7828df1d567b4d83a3eab63deb01", + "quay.io/prometheus-operator/prometheus-operator:v0.68.0" + ], + "sizeBytes": 16473146 + }, + { + "names": [ + "registry.k8s.io/coredns/coredns@sha256:a0ead06651cf580044aeb0a0feba63591858fb2e43ade8c9dea45a6a89ae7e5e", + "registry.k8s.io/coredns/coredns:v1.10.1" + ], + "sizeBytes": 16190758 + }, + { + "names": [ + "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", + "docker.io/mrlhansen/idrac_exporter:c9f9bfe" + ], + "sizeBytes": 15698654 + }, + { + "names": [ + "quay.io/prometheus-operator/prometheus-config-reloader@sha256:2eada80dc7264dd53fcfa67ca86f8a398a07112cce7017eb44d6574483801356", + "quay.io/prometheus-operator/prometheus-config-reloader:v0.68.0" + ], + "sizeBytes": 13755925 + }, + { + "names": [ + "registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:af8220f534938de121a694cb7314313a6195c9d494fc30bfa6885b08a276bb82", + "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.10.1" + ], + "sizeBytes": 13464332 + }, + { + "names": [ + "registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:ec5d6f6be2280212da0e55572b67d76f29c0342b3b6d24cc47b32d80ee3374a4", + "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.10.0" + ], + "sizeBytes": 13454920 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", + "docker.io/prom/node-exporter:latest" + ], + "sizeBytes": 11777109 + }, + { + "names": [ + 
"docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter:v1.6.1" + ], + "sizeBytes": 11725464 + }, + { + "names": [ + "quay.io/prometheus/pushgateway@sha256:979a69ab4a4016c89f2b1c53dacaf6190cd676c9d55f7659aabdd208ba48b7c7", + "quay.io/prometheus/pushgateway:v1.6.2" + ], + "sizeBytes": 11593933 + }, + { + "names": [ + "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" + ], + "sizeBytes": 10387407 + }, + { + "names": [ + "docker.io/prom/snmp-exporter@sha256:9b0914d7bd409cd97d5a123752b35fdc634998101ac1c51ee3cf1d3e5c8a88b3", + "docker.io/prom/snmp-exporter:v0.21.0" + ], + "sizeBytes": 9814054 + }, + { + "names": [ + "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" + ], + "sizeBytes": 9430791 + }, + { + "names": [ + "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", + "docker.io/flannel/flannel-cni-plugin:v1.2.0" + ], + "sizeBytes": 3879095 + }, + { + "names": [ + "registry.k8s.io/pause@sha256:7031c1b283388d2c2e09b57badb803c05ebed362dc88d84b480cc47f72a21097", + "registry.k8s.io/pause:3.9" + ], + "sizeBytes": 321520 + }, + { + "names": [ + "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", + "registry.k8s.io/pause:3.8" + ], + "sizeBytes": 311286 } + ], + "nodeInfo": { + "architecture": "amd64", + "bootID": "0d7b618d-f565-400b-8f50-34d991baf17e", + "containerRuntimeVersion": "containerd://1.7.2", + "kernelVersion": "5.15.0-97-generic", + "kubeProxyVersion": "v1.28.2", + "kubeletVersion": "v1.28.2", + "machineID": "01f492f5179144dbaf18a56d18110114", + "operatingSystem": "linux", + "osImage": "Ubuntu 22.04.3 LTS", + "systemUUID": "4c4c4544-0036-5010-8039-b4c04f4b5a33" } + } + }, + { + "apiVersion": "v1", + "kind": "Node", + "metadata": { + "annotations": { + "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker01\"}", + "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"ae:27:33:36:91:5c\"}", + "flannel.alpha.coreos.com/backend-type": "vxlan", + "flannel.alpha.coreos.com/kube-subnet-manager": "true", + "flannel.alpha.coreos.com/public-ip": "10.14.2.8", + "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", + "node.alpha.kubernetes.io/ttl": "0", + "volumes.kubernetes.io/controller-managed-attach-detach": "true" + }, + "creationTimestamp": "2023-10-23T09:30:09Z", + "labels": { + "beta.kubernetes.io/arch": "amd64", + "beta.kubernetes.io/os": "linux", + "directpv.min.io/identity": "directpv-min-io", + "directpv.min.io/node": "glaciation-worker01", + "directpv.min.io/rack": "default", + "directpv.min.io/region": "default", + "directpv.min.io/zone": "default", + "kubernetes.io/arch": "amd64", + "kubernetes.io/hostname": "glaciation-worker01", + "kubernetes.io/os": "linux" + }, + "name": "glaciation-worker01", + "resourceVersion": "19338926", + "uid": "d45b30ad-ac6a-4e0b-b539-d6df84f68037" + }, + "spec": { + "podCIDR": "10.244.5.0/24", + "podCIDRs": [ + "10.244.5.0/24" + ] }, - { - "apiVersion": "v1", - "kind": "Node", - "metadata": { - "annotations": { - "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker01\"}", - "flannel.alpha.coreos.com/backend-data": 
"{\"VNI\":1,\"VtepMAC\":\"ae:27:33:36:91:5c\"}", - "flannel.alpha.coreos.com/backend-type": "vxlan", - "flannel.alpha.coreos.com/kube-subnet-manager": "true", - "flannel.alpha.coreos.com/public-ip": "10.14.2.8", - "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", - "node.alpha.kubernetes.io/ttl": "0", - "volumes.kubernetes.io/controller-managed-attach-detach": "true" - }, - "creationTimestamp": "2023-10-23T09:30:09Z", - "labels": { - "beta.kubernetes.io/arch": "amd64", - "beta.kubernetes.io/os": "linux", - "directpv.min.io/identity": "directpv-min-io", - "directpv.min.io/node": "glaciation-worker01", - "directpv.min.io/rack": "default", - "directpv.min.io/region": "default", - "directpv.min.io/zone": "default", - "kubernetes.io/arch": "amd64", - "kubernetes.io/hostname": "glaciation-worker01", - "kubernetes.io/os": "linux" - }, - "name": "glaciation-worker01", - "resourceVersion": "19338926", - "uid": "d45b30ad-ac6a-4e0b-b539-d6df84f68037" + "status": { + "addresses": [ + { + "address": "10.14.2.8", + "type": "InternalIP" + }, + { + "address": "glaciation-worker01", + "type": "Hostname" + } + ], + "allocatable": { + "cpu": "48", + "ephemeral-storage": "1698231303429", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "131232216Ki", + "pods": "110" }, - "spec": { - "podCIDR": "10.244.5.0/24", - "podCIDRs": [ - "10.244.5.0/24" - ] + "capacity": { + "cpu": "48", + "ephemeral-storage": "1842698900Ki", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "131334616Ki", + "pods": "110" }, - "status": { - "addresses": [ - { - "address": "10.14.2.8", - "type": "InternalIP" - }, - { - "address": "glaciation-worker01", - "type": "Hostname" - } - ], - "allocatable": { - "cpu": "48", - "ephemeral-storage": "1698231303429", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "131232216Ki", - "pods": "110" - }, - "capacity": { - "cpu": "48", - "ephemeral-storage": "1842698900Ki", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "131334616Ki", - "pods": "110" - }, - "conditions": [ - { - "lastHeartbeatTime": "2023-12-11T11:10:13Z", - "lastTransitionTime": "2023-12-11T11:10:13Z", - "message": "Flannel is running on this node", - "reason": "FlannelIsUp", - "status": "False", - "type": "NetworkUnavailable" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:48Z", - "lastTransitionTime": "2023-12-11T11:10:09Z", - "message": "kubelet has sufficient memory available", - "reason": "KubeletHasSufficientMemory", - "status": "False", - "type": "MemoryPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:48Z", - "lastTransitionTime": "2023-12-11T11:10:09Z", - "message": "kubelet has no disk pressure", - "reason": "KubeletHasNoDiskPressure", - "status": "False", - "type": "DiskPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:48Z", - "lastTransitionTime": "2023-12-11T11:10:09Z", - "message": "kubelet has sufficient PID available", - "reason": "KubeletHasSufficientPID", - "status": "False", - "type": "PIDPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:48Z", - "lastTransitionTime": "2024-02-01T13:17:44Z", - "message": "kubelet is posting ready status. 
AppArmor enabled", - "reason": "KubeletReady", - "status": "True", - "type": "Ready" - } - ], - "daemonEndpoints": { - "kubeletEndpoint": { - "Port": 10250 - } - }, - "images": [ - { - "names": [ - "docker.io/pournima2024/excel-image-name@sha256:3a3a64ccba4f7ee7fd96bf529aee8ace1c9ddf7a74e0db66540f32000d08b288", - "docker.io/pournima2024/excel-image-name:tag" - ], - "sizeBytes": 377376998 - }, - { - "names": [ - "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", - "docker.io/secoresearch/fuseki:latest" - ], - "sizeBytes": 338692664 - }, - { - "names": [ - "docker.io/grafana/grafana@sha256:7567a7c70a3c1d75aeeedc968d1304174a16651e55a60d1fb132a05e1e63a054", - "docker.io/grafana/grafana:latest" - ], - "sizeBytes": 113011352 - }, - { - "names": [ - "docker.io/grafana/grafana@sha256:1ee0c54286b8ca09a3dd1419ff8653e7780a148a006ac088544203bb0affe550" - ], - "sizeBytes": 111225801 - }, - { - "names": [ - "docker.io/grafana/grafana@sha256:0679e877ba204cede473782d5aba962831a3449092da120aba7d24082efe3fde", - "docker.io/grafana/grafana:10.1.5" - ], - "sizeBytes": 109599242 - }, - { - "names": [ - "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", - "docker.io/parklize/fuseki:v1" - ], - "sizeBytes": 105267079 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:bc1794e85c9e00293351b967efa267ce6af1c824ac875a9d0c7ac84700a8b53e", - "docker.io/prom/prometheus:latest" - ], - "sizeBytes": 101487261 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:beb5e30ffba08d9ae8a7961b9a2145fc8af6296ff2a4f463df7cd722fcbfc789" - ], - "sizeBytes": 99657259 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:a67e5e402ff5410b86ec48b39eab1a3c4df2a7e78a71bf025ec5e32e09090ad4" - ], - "sizeBytes": 98721042 - }, - { - "names": [ - "docker.io/prom/prometheus@sha256:3002935850ea69a59816825d4cb718fafcdb9b124e4e6153ebc6894627525f7f" - ], - "sizeBytes": 98160864 - }, - { - "names": [ - "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", - "quay.io/minio/directpv:v4.0.10" - ], - "sizeBytes": 81808104 - }, - { - "names": [ - "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", - "quay.io/sustainable_computing_io/kepler:release-0.7.2" - ], - "sizeBytes": 81436291 - }, - { - "names": [ - "docker.io/library/nginx@sha256:c26ae7472d624ba1fafd296e73cecc4f93f853088e6a9c13c0d52f6ca5865107", - "docker.io/library/nginx:latest" - ], - "sizeBytes": 70534948 - }, - { - "names": [ - "quay.io/minio/operator@sha256:bf19ad5a3ba1bd2951e582cd13891073c5314d0d1d5c27fdca3a5ec85bbc7920", - "quay.io/minio/operator:v5.0.12" - ], - "sizeBytes": 37579913 - }, - { - "names": [ - "quay.io/minio/csi-provisioner@sha256:7b5c070ec70d30b0895d91b10c39a0e6cc81c18e0d1566c77aeff2a3587fa316" - ], - "sizeBytes": 28202105 - }, - { - "names": [ - "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", - "docker.io/flannel/flannel:v0.22.3" - ], - "sizeBytes": 27017673 - }, - { - "names": [ - "quay.io/minio/csi-resizer@sha256:819f68a4daf75acec336302843f303cf360d4941249f9f5019ffbb690c8ac7c0" - ], - "sizeBytes": 26524686 - }, - { - "names": [ - "quay.io/kiwigrid/k8s-sidecar@sha256:415d07ee1027c3ff7af9e26e05e03ffd0ec0ccf9f619ac00ab24366efe4343bd", - "quay.io/kiwigrid/k8s-sidecar:1.25.1" - ], - "sizeBytes": 25110109 - }, - { - "names": [ - 
"registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", - "registry.k8s.io/kube-proxy:v1.28.2" - ], - "sizeBytes": 24558871 - }, - { - "names": [ - "registry.k8s.io/metrics-server/metrics-server@sha256:1c0419326500f1704af580d12a579671b2c3a06a8aa918cd61d0a35fb2d6b3ce", - "registry.k8s.io/metrics-server/metrics-server:v0.7.0" - ], - "sizeBytes": 19435023 - }, - { - "names": [ - "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", - "docker.io/mrlhansen/idrac_exporter:c9f9bfe" - ], - "sizeBytes": 15698654 - }, - { - "names": [ - "registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:ec5d6f6be2280212da0e55572b67d76f29c0342b3b6d24cc47b32d80ee3374a4", - "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.10.0" - ], - "sizeBytes": 13454920 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", - "docker.io/prom/node-exporter:latest" - ], - "sizeBytes": 11777109 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter:v1.6.1" - ], - "sizeBytes": 11725464 - }, - { - "names": [ - "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" - ], - "sizeBytes": 10387407 - }, - { - "names": [ - "docker.io/prom/snmp-exporter@sha256:9b0914d7bd409cd97d5a123752b35fdc634998101ac1c51ee3cf1d3e5c8a88b3", - "docker.io/prom/snmp-exporter:v0.21.0" - ], - "sizeBytes": 9814054 - }, - { - "names": [ - "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" - ], - "sizeBytes": 9430791 - }, - { - "names": [ - "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", - "docker.io/flannel/flannel-cni-plugin:v1.2.0" - ], - "sizeBytes": 3879095 - }, - { - "names": [ - "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", - "registry.k8s.io/pause:3.8" - ], - "sizeBytes": 311286 - } - ], - "nodeInfo": { - "architecture": "amd64", - "bootID": "431faa8b-0fea-470d-be88-0dba83c98541", - "containerRuntimeVersion": "containerd://1.7.2", - "kernelVersion": "5.15.0-89-generic", - "kubeProxyVersion": "v1.28.2", - "kubeletVersion": "v1.28.2", - "machineID": "d5a5b73df3904cd9aa97155f96c53b13", - "operatingSystem": "linux", - "osImage": "Ubuntu 22.04.3 LTS", - "systemUUID": "4c4c4544-004e-5810-8039-b9c04f4b5a33" + "conditions": [ + { + "lastHeartbeatTime": "2023-12-11T11:10:13Z", + "lastTransitionTime": "2023-12-11T11:10:13Z", + "message": "Flannel is running on this node", + "reason": "FlannelIsUp", + "status": "False", + "type": "NetworkUnavailable" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:48Z", + "lastTransitionTime": "2023-12-11T11:10:09Z", + "message": "kubelet has sufficient memory available", + "reason": "KubeletHasSufficientMemory", + "status": "False", + "type": "MemoryPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:48Z", + "lastTransitionTime": "2023-12-11T11:10:09Z", + "message": "kubelet has no disk pressure", + "reason": "KubeletHasNoDiskPressure", + "status": "False", + "type": "DiskPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:48Z", + "lastTransitionTime": "2023-12-11T11:10:09Z", + "message": 
"kubelet has sufficient PID available", + "reason": "KubeletHasSufficientPID", + "status": "False", + "type": "PIDPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:48Z", + "lastTransitionTime": "2024-02-01T13:17:44Z", + "message": "kubelet is posting ready status. AppArmor enabled", + "reason": "KubeletReady", + "status": "True", + "type": "Ready" } + ], + "daemonEndpoints": { + "kubeletEndpoint": { + "Port": 10250 + } + }, + "images": [ + { + "names": [ + "docker.io/pournima2024/excel-image-name@sha256:3a3a64ccba4f7ee7fd96bf529aee8ace1c9ddf7a74e0db66540f32000d08b288", + "docker.io/pournima2024/excel-image-name:tag" + ], + "sizeBytes": 377376998 + }, + { + "names": [ + "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", + "docker.io/secoresearch/fuseki:latest" + ], + "sizeBytes": 338692664 + }, + { + "names": [ + "docker.io/grafana/grafana@sha256:7567a7c70a3c1d75aeeedc968d1304174a16651e55a60d1fb132a05e1e63a054", + "docker.io/grafana/grafana:latest" + ], + "sizeBytes": 113011352 + }, + { + "names": [ + "docker.io/grafana/grafana@sha256:1ee0c54286b8ca09a3dd1419ff8653e7780a148a006ac088544203bb0affe550" + ], + "sizeBytes": 111225801 + }, + { + "names": [ + "docker.io/grafana/grafana@sha256:0679e877ba204cede473782d5aba962831a3449092da120aba7d24082efe3fde", + "docker.io/grafana/grafana:10.1.5" + ], + "sizeBytes": 109599242 + }, + { + "names": [ + "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", + "docker.io/parklize/fuseki:v1" + ], + "sizeBytes": 105267079 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:bc1794e85c9e00293351b967efa267ce6af1c824ac875a9d0c7ac84700a8b53e", + "docker.io/prom/prometheus:latest" + ], + "sizeBytes": 101487261 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:beb5e30ffba08d9ae8a7961b9a2145fc8af6296ff2a4f463df7cd722fcbfc789" + ], + "sizeBytes": 99657259 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:a67e5e402ff5410b86ec48b39eab1a3c4df2a7e78a71bf025ec5e32e09090ad4" + ], + "sizeBytes": 98721042 + }, + { + "names": [ + "docker.io/prom/prometheus@sha256:3002935850ea69a59816825d4cb718fafcdb9b124e4e6153ebc6894627525f7f" + ], + "sizeBytes": 98160864 + }, + { + "names": [ + "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", + "quay.io/minio/directpv:v4.0.10" + ], + "sizeBytes": 81808104 + }, + { + "names": [ + "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", + "quay.io/sustainable_computing_io/kepler:release-0.7.2" + ], + "sizeBytes": 81436291 + }, + { + "names": [ + "docker.io/library/nginx@sha256:c26ae7472d624ba1fafd296e73cecc4f93f853088e6a9c13c0d52f6ca5865107", + "docker.io/library/nginx:latest" + ], + "sizeBytes": 70534948 + }, + { + "names": [ + "quay.io/minio/operator@sha256:bf19ad5a3ba1bd2951e582cd13891073c5314d0d1d5c27fdca3a5ec85bbc7920", + "quay.io/minio/operator:v5.0.12" + ], + "sizeBytes": 37579913 + }, + { + "names": [ + "quay.io/minio/csi-provisioner@sha256:7b5c070ec70d30b0895d91b10c39a0e6cc81c18e0d1566c77aeff2a3587fa316" + ], + "sizeBytes": 28202105 + }, + { + "names": [ + "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", + "docker.io/flannel/flannel:v0.22.3" + ], + "sizeBytes": 27017673 + }, + { + "names": [ + "quay.io/minio/csi-resizer@sha256:819f68a4daf75acec336302843f303cf360d4941249f9f5019ffbb690c8ac7c0" + ], + "sizeBytes": 26524686 + }, + { + "names": [ + 
"quay.io/kiwigrid/k8s-sidecar@sha256:415d07ee1027c3ff7af9e26e05e03ffd0ec0ccf9f619ac00ab24366efe4343bd", + "quay.io/kiwigrid/k8s-sidecar:1.25.1" + ], + "sizeBytes": 25110109 + }, + { + "names": [ + "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", + "registry.k8s.io/kube-proxy:v1.28.2" + ], + "sizeBytes": 24558871 + }, + { + "names": [ + "registry.k8s.io/metrics-server/metrics-server@sha256:1c0419326500f1704af580d12a579671b2c3a06a8aa918cd61d0a35fb2d6b3ce", + "registry.k8s.io/metrics-server/metrics-server:v0.7.0" + ], + "sizeBytes": 19435023 + }, + { + "names": [ + "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", + "docker.io/mrlhansen/idrac_exporter:c9f9bfe" + ], + "sizeBytes": 15698654 + }, + { + "names": [ + "registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:ec5d6f6be2280212da0e55572b67d76f29c0342b3b6d24cc47b32d80ee3374a4", + "registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.10.0" + ], + "sizeBytes": 13454920 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", + "docker.io/prom/node-exporter:latest" + ], + "sizeBytes": 11777109 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter:v1.6.1" + ], + "sizeBytes": 11725464 + }, + { + "names": [ + "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" + ], + "sizeBytes": 10387407 + }, + { + "names": [ + "docker.io/prom/snmp-exporter@sha256:9b0914d7bd409cd97d5a123752b35fdc634998101ac1c51ee3cf1d3e5c8a88b3", + "docker.io/prom/snmp-exporter:v0.21.0" + ], + "sizeBytes": 9814054 + }, + { + "names": [ + "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" + ], + "sizeBytes": 9430791 + }, + { + "names": [ + "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", + "docker.io/flannel/flannel-cni-plugin:v1.2.0" + ], + "sizeBytes": 3879095 + }, + { + "names": [ + "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", + "registry.k8s.io/pause:3.8" + ], + "sizeBytes": 311286 + } + ], + "nodeInfo": { + "architecture": "amd64", + "bootID": "431faa8b-0fea-470d-be88-0dba83c98541", + "containerRuntimeVersion": "containerd://1.7.2", + "kernelVersion": "5.15.0-89-generic", + "kubeProxyVersion": "v1.28.2", + "kubeletVersion": "v1.28.2", + "machineID": "d5a5b73df3904cd9aa97155f96c53b13", + "operatingSystem": "linux", + "osImage": "Ubuntu 22.04.3 LTS", + "systemUUID": "4c4c4544-004e-5810-8039-b9c04f4b5a33" } + } + }, + { + "apiVersion": "v1", + "kind": "Node", + "metadata": { + "annotations": { + "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker02\"}", + "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"76:25:1c:59:e7:c6\"}", + "flannel.alpha.coreos.com/backend-type": "vxlan", + "flannel.alpha.coreos.com/kube-subnet-manager": "true", + "flannel.alpha.coreos.com/public-ip": "10.14.2.0", + "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", + "node.alpha.kubernetes.io/ttl": "0", + "volumes.kubernetes.io/controller-managed-attach-detach": "true" + }, + 
"creationTimestamp": "2023-10-20T09:38:00Z", + "labels": { + "beta.kubernetes.io/arch": "amd64", + "beta.kubernetes.io/os": "linux", + "directpv.min.io/identity": "directpv-min-io", + "directpv.min.io/node": "glaciation-worker02", + "directpv.min.io/rack": "default", + "directpv.min.io/region": "default", + "directpv.min.io/zone": "default", + "kubernetes.io/arch": "amd64", + "kubernetes.io/hostname": "glaciation-worker02", + "kubernetes.io/os": "linux" + }, + "name": "glaciation-worker02", + "resourceVersion": "19338835", + "uid": "b5d20893-f9ed-4a8d-b908-7d0ecca21b58" }, - { - "apiVersion": "v1", - "kind": "Node", - "metadata": { - "annotations": { - "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker02\"}", - "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"76:25:1c:59:e7:c6\"}", - "flannel.alpha.coreos.com/backend-type": "vxlan", - "flannel.alpha.coreos.com/kube-subnet-manager": "true", - "flannel.alpha.coreos.com/public-ip": "10.14.2.0", - "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", - "node.alpha.kubernetes.io/ttl": "0", - "volumes.kubernetes.io/controller-managed-attach-detach": "true" - }, - "creationTimestamp": "2023-10-20T09:38:00Z", - "labels": { - "beta.kubernetes.io/arch": "amd64", - "beta.kubernetes.io/os": "linux", - "directpv.min.io/identity": "directpv-min-io", - "directpv.min.io/node": "glaciation-worker02", - "directpv.min.io/rack": "default", - "directpv.min.io/region": "default", - "directpv.min.io/zone": "default", - "kubernetes.io/arch": "amd64", - "kubernetes.io/hostname": "glaciation-worker02", - "kubernetes.io/os": "linux" - }, - "name": "glaciation-worker02", - "resourceVersion": "19338835", - "uid": "b5d20893-f9ed-4a8d-b908-7d0ecca21b58" + "spec": { + "podCIDR": "10.244.2.0/24", + "podCIDRs": [ + "10.244.2.0/24" + ] + }, + "status": { + "addresses": [ + { + "address": "10.14.2.0", + "type": "InternalIP" + }, + { + "address": "glaciation-worker02", + "type": "Hostname" + } + ], + "allocatable": { + "cpu": "6", + "ephemeral-storage": "226751816534", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "16060524Ki", + "pods": "110" }, - "spec": { - "podCIDR": "10.244.2.0/24", - "podCIDRs": [ - "10.244.2.0/24" - ] + "capacity": { + "cpu": "6", + "ephemeral-storage": "246041468Ki", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "16162924Ki", + "pods": "110" }, - "status": { - "addresses": [ - { - "address": "10.14.2.0", - "type": "InternalIP" - }, - { - "address": "glaciation-worker02", - "type": "Hostname" - } - ], - "allocatable": { - "cpu": "6", - "ephemeral-storage": "226751816534", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "16060524Ki", - "pods": "110" - }, - "capacity": { - "cpu": "6", - "ephemeral-storage": "246041468Ki", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "16162924Ki", - "pods": "110" - }, - "conditions": [ - { - "lastHeartbeatTime": "2023-12-11T11:09:38Z", - "lastTransitionTime": "2023-12-11T11:09:38Z", - "message": "Flannel is running on this node", - "reason": "FlannelIsUp", - "status": "False", - "type": "NetworkUnavailable" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:09Z", - "lastTransitionTime": "2023-12-11T11:09:24Z", - "message": "kubelet has sufficient memory available", - "reason": "KubeletHasSufficientMemory", - "status": "False", - "type": "MemoryPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:09Z", - "lastTransitionTime": "2023-12-11T11:09:24Z", - "message": "kubelet has no disk 
pressure", - "reason": "KubeletHasNoDiskPressure", - "status": "False", - "type": "DiskPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:09Z", - "lastTransitionTime": "2023-12-11T11:09:24Z", - "message": "kubelet has sufficient PID available", - "reason": "KubeletHasSufficientPID", - "status": "False", - "type": "PIDPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:09Z", - "lastTransitionTime": "2023-12-11T11:09:24Z", - "message": "kubelet is posting ready status. AppArmor enabled", - "reason": "KubeletReady", - "status": "True", - "type": "Ready" - } - ], - "daemonEndpoints": { - "kubeletEndpoint": { - "Port": 10250 - } - }, - "images": [ - { - "names": [ - "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", - "docker.io/secoresearch/fuseki:latest" - ], - "sizeBytes": 338692664 - }, - { - "names": [ - "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", - "docker.io/parklize/fuseki:v1" - ], - "sizeBytes": 105267079 - }, - { - "names": [ - "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", - "quay.io/minio/directpv:v4.0.10" - ], - "sizeBytes": 81808104 - }, - { - "names": [ - "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", - "quay.io/sustainable_computing_io/kepler:release-0.7.2" - ], - "sizeBytes": 81436291 - }, - { - "names": [ - "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", - "docker.io/flannel/flannel:v0.22.3" - ], - "sizeBytes": 27017673 - }, - { - "names": [ - "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", - "registry.k8s.io/kube-proxy:v1.28.2" - ], - "sizeBytes": 24558871 - }, - { - "names": [ - "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", - "docker.io/mrlhansen/idrac_exporter:c9f9bfe" - ], - "sizeBytes": 15698654 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", - "docker.io/prom/node-exporter:latest" - ], - "sizeBytes": 11777109 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter:v1.6.1" - ], - "sizeBytes": 11725464 - }, - { - "names": [ - "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" - ], - "sizeBytes": 10387407 - }, - { - "names": [ - "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" - ], - "sizeBytes": 9430791 - }, - { - "names": [ - "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", - "docker.io/flannel/flannel-cni-plugin:v1.2.0" - ], - "sizeBytes": 3879095 - }, - { - "names": [ - "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", - "registry.k8s.io/pause:3.8" - ], - "sizeBytes": 311286 - } - ], - "nodeInfo": { - "architecture": "amd64", - "bootID": "ea66505f-3e15-460d-ad82-fd2fc97c1432", - "containerRuntimeVersion": "containerd://1.7.2", - "kernelVersion": "5.15.0-1045-intel-iotg", - "kubeProxyVersion": "v1.28.2", - "kubeletVersion": "v1.28.2", - 
"machineID": "79dbccdc147df34789aa05026368a3d5", - "operatingSystem": "linux", - "osImage": "Ubuntu 22.04.3 LTS", - "systemUUID": "0921a780-efab-11ed-8aa2-3c0ee53f2d00" + "conditions": [ + { + "lastHeartbeatTime": "2023-12-11T11:09:38Z", + "lastTransitionTime": "2023-12-11T11:09:38Z", + "message": "Flannel is running on this node", + "reason": "FlannelIsUp", + "status": "False", + "type": "NetworkUnavailable" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:09Z", + "lastTransitionTime": "2023-12-11T11:09:24Z", + "message": "kubelet has sufficient memory available", + "reason": "KubeletHasSufficientMemory", + "status": "False", + "type": "MemoryPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:09Z", + "lastTransitionTime": "2023-12-11T11:09:24Z", + "message": "kubelet has no disk pressure", + "reason": "KubeletHasNoDiskPressure", + "status": "False", + "type": "DiskPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:09Z", + "lastTransitionTime": "2023-12-11T11:09:24Z", + "message": "kubelet has sufficient PID available", + "reason": "KubeletHasSufficientPID", + "status": "False", + "type": "PIDPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:09Z", + "lastTransitionTime": "2023-12-11T11:09:24Z", + "message": "kubelet is posting ready status. AppArmor enabled", + "reason": "KubeletReady", + "status": "True", + "type": "Ready" + } + ], + "daemonEndpoints": { + "kubeletEndpoint": { + "Port": 10250 } + }, + "images": [ + { + "names": [ + "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", + "docker.io/secoresearch/fuseki:latest" + ], + "sizeBytes": 338692664 + }, + { + "names": [ + "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", + "docker.io/parklize/fuseki:v1" + ], + "sizeBytes": 105267079 + }, + { + "names": [ + "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", + "quay.io/minio/directpv:v4.0.10" + ], + "sizeBytes": 81808104 + }, + { + "names": [ + "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", + "quay.io/sustainable_computing_io/kepler:release-0.7.2" + ], + "sizeBytes": 81436291 + }, + { + "names": [ + "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", + "docker.io/flannel/flannel:v0.22.3" + ], + "sizeBytes": 27017673 + }, + { + "names": [ + "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", + "registry.k8s.io/kube-proxy:v1.28.2" + ], + "sizeBytes": 24558871 + }, + { + "names": [ + "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", + "docker.io/mrlhansen/idrac_exporter:c9f9bfe" + ], + "sizeBytes": 15698654 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", + "docker.io/prom/node-exporter:latest" + ], + "sizeBytes": 11777109 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter:v1.6.1" + ], + "sizeBytes": 11725464 + }, + { + "names": [ + "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" + ], + "sizeBytes": 10387407 + }, + { + "names": [ 
+ "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" + ], + "sizeBytes": 9430791 + }, + { + "names": [ + "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", + "docker.io/flannel/flannel-cni-plugin:v1.2.0" + ], + "sizeBytes": 3879095 + }, + { + "names": [ + "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", + "registry.k8s.io/pause:3.8" + ], + "sizeBytes": 311286 + } + ], + "nodeInfo": { + "architecture": "amd64", + "bootID": "ea66505f-3e15-460d-ad82-fd2fc97c1432", + "containerRuntimeVersion": "containerd://1.7.2", + "kernelVersion": "5.15.0-1045-intel-iotg", + "kubeProxyVersion": "v1.28.2", + "kubeletVersion": "v1.28.2", + "machineID": "79dbccdc147df34789aa05026368a3d5", + "operatingSystem": "linux", + "osImage": "Ubuntu 22.04.3 LTS", + "systemUUID": "0921a780-efab-11ed-8aa2-3c0ee53f2d00" } + } + }, + { + "apiVersion": "v1", + "kind": "Node", + "metadata": { + "annotations": { + "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker03\"}", + "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"26:f9:d4:34:5a:92\"}", + "flannel.alpha.coreos.com/backend-type": "vxlan", + "flannel.alpha.coreos.com/kube-subnet-manager": "true", + "flannel.alpha.coreos.com/public-ip": "10.14.2.1", + "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", + "node.alpha.kubernetes.io/ttl": "0", + "volumes.kubernetes.io/controller-managed-attach-detach": "true" + }, + "creationTimestamp": "2023-10-20T11:01:41Z", + "labels": { + "beta.kubernetes.io/arch": "amd64", + "beta.kubernetes.io/os": "linux", + "directpv.min.io/identity": "directpv-min-io", + "directpv.min.io/node": "glaciation-worker03", + "directpv.min.io/rack": "default", + "directpv.min.io/region": "default", + "directpv.min.io/zone": "default", + "kubernetes.io/arch": "amd64", + "kubernetes.io/hostname": "glaciation-worker03", + "kubernetes.io/os": "linux" + }, + "name": "glaciation-worker03", + "resourceVersion": "19338847", + "uid": "1d345c49-37c4-43c7-8416-d05caea3f5ec" }, - { - "apiVersion": "v1", - "kind": "Node", - "metadata": { - "annotations": { - "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker03\"}", - "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"26:f9:d4:34:5a:92\"}", - "flannel.alpha.coreos.com/backend-type": "vxlan", - "flannel.alpha.coreos.com/kube-subnet-manager": "true", - "flannel.alpha.coreos.com/public-ip": "10.14.2.1", - "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", - "node.alpha.kubernetes.io/ttl": "0", - "volumes.kubernetes.io/controller-managed-attach-detach": "true" - }, - "creationTimestamp": "2023-10-20T11:01:41Z", - "labels": { - "beta.kubernetes.io/arch": "amd64", - "beta.kubernetes.io/os": "linux", - "directpv.min.io/identity": "directpv-min-io", - "directpv.min.io/node": "glaciation-worker03", - "directpv.min.io/rack": "default", - "directpv.min.io/region": "default", - "directpv.min.io/zone": "default", - "kubernetes.io/arch": "amd64", - "kubernetes.io/hostname": "glaciation-worker03", - "kubernetes.io/os": "linux" - }, - "name": "glaciation-worker03", - "resourceVersion": "19338847", - "uid": "1d345c49-37c4-43c7-8416-d05caea3f5ec" + "spec": { + "podCIDR": "10.244.3.0/24", + "podCIDRs": [ + "10.244.3.0/24" + ] + }, + "status": { + "addresses": [ + { + "address": "10.14.2.1", + "type": "InternalIP" + }, + { + 
"address": "glaciation-worker03", + "type": "Hostname" + } + ], + "allocatable": { + "cpu": "6", + "ephemeral-storage": "226751816534", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "16060524Ki", + "pods": "110" }, - "spec": { - "podCIDR": "10.244.3.0/24", - "podCIDRs": [ - "10.244.3.0/24" - ] + "capacity": { + "cpu": "6", + "ephemeral-storage": "246041468Ki", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "16162924Ki", + "pods": "110" }, - "status": { - "addresses": [ - { - "address": "10.14.2.1", - "type": "InternalIP" - }, - { - "address": "glaciation-worker03", - "type": "Hostname" - } - ], - "allocatable": { - "cpu": "6", - "ephemeral-storage": "226751816534", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "16060524Ki", - "pods": "110" - }, - "capacity": { - "cpu": "6", - "ephemeral-storage": "246041468Ki", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "16162924Ki", - "pods": "110" - }, - "conditions": [ - { - "lastHeartbeatTime": "2023-12-11T11:10:25Z", - "lastTransitionTime": "2023-12-11T11:10:25Z", - "message": "Flannel is running on this node", - "reason": "FlannelIsUp", - "status": "False", - "type": "NetworkUnavailable" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:15Z", - "lastTransitionTime": "2023-12-11T11:09:27Z", - "message": "kubelet has sufficient memory available", - "reason": "KubeletHasSufficientMemory", - "status": "False", - "type": "MemoryPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:15Z", - "lastTransitionTime": "2023-12-11T11:09:27Z", - "message": "kubelet has no disk pressure", - "reason": "KubeletHasNoDiskPressure", - "status": "False", - "type": "DiskPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:15Z", - "lastTransitionTime": "2023-12-11T11:09:27Z", - "message": "kubelet has sufficient PID available", - "reason": "KubeletHasSufficientPID", - "status": "False", - "type": "PIDPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:15Z", - "lastTransitionTime": "2023-12-11T11:09:27Z", - "message": "kubelet is posting ready status. 
AppArmor enabled", - "reason": "KubeletReady", - "status": "True", - "type": "Ready" - } - ], - "daemonEndpoints": { - "kubeletEndpoint": { - "Port": 10250 - } - }, - "images": [ - { - "names": [ - "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", - "docker.io/secoresearch/fuseki:latest" - ], - "sizeBytes": 338692664 - }, - { - "names": [ - "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", - "docker.io/parklize/fuseki:v1" - ], - "sizeBytes": 105267079 - }, - { - "names": [ - "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", - "quay.io/minio/directpv:v4.0.10" - ], - "sizeBytes": 81808104 - }, - { - "names": [ - "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", - "quay.io/sustainable_computing_io/kepler:release-0.7.2" - ], - "sizeBytes": 81436291 - }, - { - "names": [ - "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", - "docker.io/flannel/flannel:v0.22.3" - ], - "sizeBytes": 27017673 - }, - { - "names": [ - "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", - "registry.k8s.io/kube-proxy:v1.28.2" - ], - "sizeBytes": 24558871 - }, - { - "names": [ - "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", - "docker.io/mrlhansen/idrac_exporter:c9f9bfe" - ], - "sizeBytes": 15698654 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", - "docker.io/prom/node-exporter:latest" - ], - "sizeBytes": 11777109 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter:v1.6.1" - ], - "sizeBytes": 11725464 - }, - { - "names": [ - "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" - ], - "sizeBytes": 10387407 - }, - { - "names": [ - "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" - ], - "sizeBytes": 9430791 - }, - { - "names": [ - "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", - "docker.io/flannel/flannel-cni-plugin:v1.2.0" - ], - "sizeBytes": 3879095 - }, - { - "names": [ - "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", - "registry.k8s.io/pause:3.8" - ], - "sizeBytes": 311286 - } - ], - "nodeInfo": { - "architecture": "amd64", - "bootID": "48cc0076-25b6-41e5-942e-2403afbe6051", - "containerRuntimeVersion": "containerd://1.7.2", - "kernelVersion": "5.15.0-1045-intel-iotg", - "kubeProxyVersion": "v1.28.2", - "kubeletVersion": "v1.28.2", - "machineID": "79dbccdc147df34789aa05026368a3d5", - "operatingSystem": "linux", - "osImage": "Ubuntu 22.04.3 LTS", - "systemUUID": "0d32d280-ef75-11ed-a27a-ec18ae197b00" + "conditions": [ + { + "lastHeartbeatTime": "2023-12-11T11:10:25Z", + "lastTransitionTime": "2023-12-11T11:10:25Z", + "message": "Flannel is running on this node", + "reason": "FlannelIsUp", + "status": "False", + "type": "NetworkUnavailable" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:15Z", + "lastTransitionTime": 
"2023-12-11T11:09:27Z", + "message": "kubelet has sufficient memory available", + "reason": "KubeletHasSufficientMemory", + "status": "False", + "type": "MemoryPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:15Z", + "lastTransitionTime": "2023-12-11T11:09:27Z", + "message": "kubelet has no disk pressure", + "reason": "KubeletHasNoDiskPressure", + "status": "False", + "type": "DiskPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:15Z", + "lastTransitionTime": "2023-12-11T11:09:27Z", + "message": "kubelet has sufficient PID available", + "reason": "KubeletHasSufficientPID", + "status": "False", + "type": "PIDPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:15Z", + "lastTransitionTime": "2023-12-11T11:09:27Z", + "message": "kubelet is posting ready status. AppArmor enabled", + "reason": "KubeletReady", + "status": "True", + "type": "Ready" } + ], + "daemonEndpoints": { + "kubeletEndpoint": { + "Port": 10250 + } + }, + "images": [ + { + "names": [ + "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", + "docker.io/secoresearch/fuseki:latest" + ], + "sizeBytes": 338692664 + }, + { + "names": [ + "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", + "docker.io/parklize/fuseki:v1" + ], + "sizeBytes": 105267079 + }, + { + "names": [ + "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", + "quay.io/minio/directpv:v4.0.10" + ], + "sizeBytes": 81808104 + }, + { + "names": [ + "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", + "quay.io/sustainable_computing_io/kepler:release-0.7.2" + ], + "sizeBytes": 81436291 + }, + { + "names": [ + "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", + "docker.io/flannel/flannel:v0.22.3" + ], + "sizeBytes": 27017673 + }, + { + "names": [ + "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", + "registry.k8s.io/kube-proxy:v1.28.2" + ], + "sizeBytes": 24558871 + }, + { + "names": [ + "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", + "docker.io/mrlhansen/idrac_exporter:c9f9bfe" + ], + "sizeBytes": 15698654 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", + "docker.io/prom/node-exporter:latest" + ], + "sizeBytes": 11777109 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter:v1.6.1" + ], + "sizeBytes": 11725464 + }, + { + "names": [ + "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" + ], + "sizeBytes": 10387407 + }, + { + "names": [ + "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" + ], + "sizeBytes": 9430791 + }, + { + "names": [ + "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", + "docker.io/flannel/flannel-cni-plugin:v1.2.0" + ], + "sizeBytes": 3879095 + }, + { + "names": [ + "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", + 
"registry.k8s.io/pause:3.8" + ], + "sizeBytes": 311286 + } + ], + "nodeInfo": { + "architecture": "amd64", + "bootID": "48cc0076-25b6-41e5-942e-2403afbe6051", + "containerRuntimeVersion": "containerd://1.7.2", + "kernelVersion": "5.15.0-1045-intel-iotg", + "kubeProxyVersion": "v1.28.2", + "kubeletVersion": "v1.28.2", + "machineID": "79dbccdc147df34789aa05026368a3d5", + "operatingSystem": "linux", + "osImage": "Ubuntu 22.04.3 LTS", + "systemUUID": "0d32d280-ef75-11ed-a27a-ec18ae197b00" } + } + }, + { + "apiVersion": "v1", + "kind": "Node", + "metadata": { + "annotations": { + "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker04\"}", + "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"d6:21:01:83:b3:7d\"}", + "flannel.alpha.coreos.com/backend-type": "vxlan", + "flannel.alpha.coreos.com/kube-subnet-manager": "true", + "flannel.alpha.coreos.com/public-ip": "10.14.2.2", + "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", + "node.alpha.kubernetes.io/ttl": "0", + "volumes.kubernetes.io/controller-managed-attach-detach": "true" + }, + "creationTimestamp": "2023-10-20T11:01:49Z", + "labels": { + "beta.kubernetes.io/arch": "amd64", + "beta.kubernetes.io/os": "linux", + "directpv.min.io/identity": "directpv-min-io", + "directpv.min.io/node": "glaciation-worker04", + "directpv.min.io/rack": "default", + "directpv.min.io/region": "default", + "directpv.min.io/zone": "default", + "kubernetes.io/arch": "amd64", + "kubernetes.io/hostname": "glaciation-worker04", + "kubernetes.io/os": "linux" + }, + "name": "glaciation-worker04", + "resourceVersion": "19338873", + "uid": "ff9ed64f-7e0b-4e5f-ac93-b027d82e5c90" + }, + "spec": { + "podCIDR": "10.244.4.0/24", + "podCIDRs": [ + "10.244.4.0/24" + ] }, - { - "apiVersion": "v1", - "kind": "Node", - "metadata": { - "annotations": { - "csi.volume.kubernetes.io/nodeid": "{\"directpv-min-io\":\"glaciation-worker04\"}", - "flannel.alpha.coreos.com/backend-data": "{\"VNI\":1,\"VtepMAC\":\"d6:21:01:83:b3:7d\"}", - "flannel.alpha.coreos.com/backend-type": "vxlan", - "flannel.alpha.coreos.com/kube-subnet-manager": "true", - "flannel.alpha.coreos.com/public-ip": "10.14.2.2", - "kubeadm.alpha.kubernetes.io/cri-socket": "unix:///var/run/containerd/containerd.sock", - "node.alpha.kubernetes.io/ttl": "0", - "volumes.kubernetes.io/controller-managed-attach-detach": "true" - }, - "creationTimestamp": "2023-10-20T11:01:49Z", - "labels": { - "beta.kubernetes.io/arch": "amd64", - "beta.kubernetes.io/os": "linux", - "directpv.min.io/identity": "directpv-min-io", - "directpv.min.io/node": "glaciation-worker04", - "directpv.min.io/rack": "default", - "directpv.min.io/region": "default", - "directpv.min.io/zone": "default", - "kubernetes.io/arch": "amd64", - "kubernetes.io/hostname": "glaciation-worker04", - "kubernetes.io/os": "linux" - }, - "name": "glaciation-worker04", - "resourceVersion": "19338873", - "uid": "ff9ed64f-7e0b-4e5f-ac93-b027d82e5c90" + "status": { + "addresses": [ + { + "address": "10.14.2.2", + "type": "InternalIP" + }, + { + "address": "glaciation-worker04", + "type": "Hostname" + } + ], + "allocatable": { + "cpu": "6", + "ephemeral-storage": "226751816534", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "16060520Ki", + "pods": "110" }, - "spec": { - "podCIDR": "10.244.4.0/24", - "podCIDRs": [ - "10.244.4.0/24" - ] + "capacity": { + "cpu": "6", + "ephemeral-storage": "246041468Ki", + "hugepages-1Gi": "0", + "hugepages-2Mi": "0", + "memory": "16162920Ki", + "pods": "110" }, 
- "status": { - "addresses": [ - { - "address": "10.14.2.2", - "type": "InternalIP" - }, - { - "address": "glaciation-worker04", - "type": "Hostname" - } - ], - "allocatable": { - "cpu": "6", - "ephemeral-storage": "226751816534", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "16060520Ki", - "pods": "110" - }, - "capacity": { - "cpu": "6", - "ephemeral-storage": "246041468Ki", - "hugepages-1Gi": "0", - "hugepages-2Mi": "0", - "memory": "16162920Ki", - "pods": "110" - }, - "conditions": [ - { - "lastHeartbeatTime": "2023-12-11T11:10:17Z", - "lastTransitionTime": "2023-12-11T11:10:17Z", - "message": "Flannel is running on this node", - "reason": "FlannelIsUp", - "status": "False", - "type": "NetworkUnavailable" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:26Z", - "lastTransitionTime": "2023-10-24T11:40:51Z", - "message": "kubelet has sufficient memory available", - "reason": "KubeletHasSufficientMemory", - "status": "False", - "type": "MemoryPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:26Z", - "lastTransitionTime": "2023-10-24T11:40:51Z", - "message": "kubelet has no disk pressure", - "reason": "KubeletHasNoDiskPressure", - "status": "False", - "type": "DiskPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:26Z", - "lastTransitionTime": "2023-10-24T11:40:51Z", - "message": "kubelet has sufficient PID available", - "reason": "KubeletHasSufficientPID", - "status": "False", - "type": "PIDPressure" - }, - { - "lastHeartbeatTime": "2024-03-01T14:16:26Z", - "lastTransitionTime": "2023-10-24T11:40:51Z", - "message": "kubelet is posting ready status. AppArmor enabled", - "reason": "KubeletReady", - "status": "True", - "type": "Ready" - } - ], - "daemonEndpoints": { - "kubeletEndpoint": { - "Port": 10250 - } - }, - "images": [ - { - "names": [ - "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", - "docker.io/secoresearch/fuseki:latest" - ], - "sizeBytes": 338692664 - }, - { - "names": [ - "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", - "docker.io/parklize/fuseki:v1" - ], - "sizeBytes": 105267079 - }, - { - "names": [ - "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", - "quay.io/minio/directpv:v4.0.10" - ], - "sizeBytes": 81808104 - }, - { - "names": [ - "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", - "quay.io/sustainable_computing_io/kepler:release-0.7.2" - ], - "sizeBytes": 81436291 - }, - { - "names": [ - "quay.io/minio/csi-provisioner@sha256:7b5c070ec70d30b0895d91b10c39a0e6cc81c18e0d1566c77aeff2a3587fa316" - ], - "sizeBytes": 28202105 - }, - { - "names": [ - "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", - "docker.io/flannel/flannel:v0.22.3" - ], - "sizeBytes": 27017673 - }, - { - "names": [ - "quay.io/minio/csi-resizer@sha256:819f68a4daf75acec336302843f303cf360d4941249f9f5019ffbb690c8ac7c0" - ], - "sizeBytes": 26524686 - }, - { - "names": [ - "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", - "registry.k8s.io/kube-proxy:v1.28.2" - ], - "sizeBytes": 24558871 - }, - { - "names": [ - "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", - "docker.io/mrlhansen/idrac_exporter:c9f9bfe" - ], - "sizeBytes": 15698654 - }, - { - "names": [ - 
"docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", - "docker.io/prom/node-exporter:latest" - ], - "sizeBytes": 11777109 - }, - { - "names": [ - "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", - "quay.io/prometheus/node-exporter:v1.6.1" - ], - "sizeBytes": 11725464 - }, - { - "names": [ - "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" - ], - "sizeBytes": 10387407 - }, - { - "names": [ - "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" - ], - "sizeBytes": 9430791 - }, - { - "names": [ - "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", - "docker.io/flannel/flannel-cni-plugin:v1.2.0" - ], - "sizeBytes": 3879095 - }, - { - "names": [ - "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", - "registry.k8s.io/pause:3.8" - ], - "sizeBytes": 311286 - } - ], - "nodeInfo": { - "architecture": "amd64", - "bootID": "a5909870-9362-4cc9-84b5-d27651f84831", - "containerRuntimeVersion": "containerd://1.7.2", - "kernelVersion": "5.15.0-1045-intel-iotg", - "kubeProxyVersion": "v1.28.2", - "kubeletVersion": "v1.28.2", - "machineID": "79dbccdc147df34789aa05026368a3d5", - "operatingSystem": "linux", - "osImage": "Ubuntu 22.04.3 LTS", - "systemUUID": "30a06080-effb-11ed-a4ff-0c4bbde04000" + "conditions": [ + { + "lastHeartbeatTime": "2023-12-11T11:10:17Z", + "lastTransitionTime": "2023-12-11T11:10:17Z", + "message": "Flannel is running on this node", + "reason": "FlannelIsUp", + "status": "False", + "type": "NetworkUnavailable" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:26Z", + "lastTransitionTime": "2023-10-24T11:40:51Z", + "message": "kubelet has sufficient memory available", + "reason": "KubeletHasSufficientMemory", + "status": "False", + "type": "MemoryPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:26Z", + "lastTransitionTime": "2023-10-24T11:40:51Z", + "message": "kubelet has no disk pressure", + "reason": "KubeletHasNoDiskPressure", + "status": "False", + "type": "DiskPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:26Z", + "lastTransitionTime": "2023-10-24T11:40:51Z", + "message": "kubelet has sufficient PID available", + "reason": "KubeletHasSufficientPID", + "status": "False", + "type": "PIDPressure" + }, + { + "lastHeartbeatTime": "2024-03-01T14:16:26Z", + "lastTransitionTime": "2023-10-24T11:40:51Z", + "message": "kubelet is posting ready status. 
AppArmor enabled", + "reason": "KubeletReady", + "status": "True", + "type": "Ready" + } + ], + "daemonEndpoints": { + "kubeletEndpoint": { + "Port": 10250 + } + }, + "images": [ + { + "names": [ + "docker.io/secoresearch/fuseki@sha256:e9211abdecaa1d8f7dea86b33adac9f0c1497c1f0ba4e64ab70f2f099b7e0154", + "docker.io/secoresearch/fuseki:latest" + ], + "sizeBytes": 338692664 + }, + { + "names": [ + "docker.io/parklize/fuseki@sha256:133f05910938eb6d488f1cc90aeb8707078f8bd6cd058ad45d7cdf2fca926d50", + "docker.io/parklize/fuseki:v1" + ], + "sizeBytes": 105267079 + }, + { + "names": [ + "quay.io/minio/directpv@sha256:68c6f0be55dcf4b55aad04c5f61378b03223c45c3af724b35bc2e1a84f22d2f8", + "quay.io/minio/directpv:v4.0.10" + ], + "sizeBytes": 81808104 + }, + { + "names": [ + "quay.io/sustainable_computing_io/kepler@sha256:8bdfb2c53426e0ea5a51941e316ae4db84bca63b03ef9e952053bfa53d4d2683", + "quay.io/sustainable_computing_io/kepler:release-0.7.2" + ], + "sizeBytes": 81436291 + }, + { + "names": [ + "quay.io/minio/csi-provisioner@sha256:7b5c070ec70d30b0895d91b10c39a0e6cc81c18e0d1566c77aeff2a3587fa316" + ], + "sizeBytes": 28202105 + }, + { + "names": [ + "docker.io/flannel/flannel@sha256:34585231b69718efc4f926ebca734659f01221554f37a925d9a1190bb16e5b91", + "docker.io/flannel/flannel:v0.22.3" + ], + "sizeBytes": 27017673 + }, + { + "names": [ + "quay.io/minio/csi-resizer@sha256:819f68a4daf75acec336302843f303cf360d4941249f9f5019ffbb690c8ac7c0" + ], + "sizeBytes": 26524686 + }, + { + "names": [ + "registry.k8s.io/kube-proxy@sha256:41c8f92d1cd571e0e36af431f35c78379f84f5daf5b85d43014a9940d697afcf", + "registry.k8s.io/kube-proxy:v1.28.2" + ], + "sizeBytes": 24558871 + }, + { + "names": [ + "docker.io/mrlhansen/idrac_exporter@sha256:ce5818638d3649ed5826ec1a0e23444ae182ec4d14d1d1d32b84c3aee92778db", + "docker.io/mrlhansen/idrac_exporter:c9f9bfe" + ], + "sizeBytes": 15698654 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:4cb2b9019f1757be8482419002cb7afe028fdba35d47958829e4cfeaf6246d80", + "docker.io/prom/node-exporter:latest" + ], + "sizeBytes": 11777109 + }, + { + "names": [ + "docker.io/prom/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter@sha256:81f94e50ea37a88dfee849d0f4acad25b96b397061f59e5095905f6bc5829637", + "quay.io/prometheus/node-exporter:v1.6.1" + ], + "sizeBytes": 11725464 + }, + { + "names": [ + "quay.io/minio/csi-node-driver-registrar@sha256:c805fdc166761218dc9478e7ac8e0ad0e42ad442269e75608823da3eb761e67e" + ], + "sizeBytes": 10387407 + }, + { + "names": [ + "quay.io/minio/livenessprobe@sha256:f3bc9a84f149cd7362e4bd0ae8cd90b26ad020c2591bfe19e63ff97aacf806c3" + ], + "sizeBytes": 9430791 + }, + { + "names": [ + "docker.io/flannel/flannel-cni-plugin@sha256:ca6779c6ad63b77af8a00151cefc08578241197b9a6fe144b0e55484bc52b852", + "docker.io/flannel/flannel-cni-plugin:v1.2.0" + ], + "sizeBytes": 3879095 + }, + { + "names": [ + "registry.k8s.io/pause@sha256:9001185023633d17a2f98ff69b6ff2615b8ea02a825adffa40422f51dfdcde9d", + "registry.k8s.io/pause:3.8" + ], + "sizeBytes": 311286 } + ], + "nodeInfo": { + "architecture": "amd64", + "bootID": "a5909870-9362-4cc9-84b5-d27651f84831", + "containerRuntimeVersion": "containerd://1.7.2", + "kernelVersion": "5.15.0-1045-intel-iotg", + "kubeProxyVersion": "v1.28.2", + "kubeletVersion": "v1.28.2", + "machineID": "79dbccdc147df34789aa05026368a3d5", + "operatingSystem": "linux", + "osImage": "Ubuntu 22.04.3 LTS", + "systemUUID": "30a06080-effb-11ed-a4ff-0c4bbde04000" } } - ], - "kind": 
"List", - "metadata": { - "resourceVersion": "" } -} +] diff --git a/app/k8s_transform/cluster_transformer.py b/app/k8s_transform/cluster_transformer.py index 61597be..8c41d4b 100644 --- a/app/k8s_transform/cluster_transformer.py +++ b/app/k8s_transform/cluster_transformer.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional import yaml from jsonpath_ng.ext import parse @@ -21,23 +21,26 @@ def __init__( self.nodes = nodes def transform(self, _: TransformationContext) -> None: - config_str: str = self.get_cluster_configuration() - config = yaml.safe_load(config_str) - cluster_id = self.get_cluster_id(config) + config_str = self.get_cluster_configuration() + if config_str: + config = yaml.safe_load(config_str) + cluster_id = self.get_cluster_id(config) + else: + cluster_id = IRI(self.CLUSTER_PREFIX, "Unknown") self.add_work_producing_resource(cluster_id, "KubernetesCluster") - for node in self.get_nodes(): + for node in self.nodes: self.write_node_reference(cluster_id, node) - def get_cluster_configuration(self) -> str: - return str(parse("$.data.ClusterConfiguration").find(self.source)[0].value) + def get_cluster_configuration(self) -> Optional[str]: + config_match = parse("$.data.ClusterConfiguration").find(self.source) + if len(config_match) == 0: + return None + return str(config_match[0].value) def get_cluster_id(self, config: Dict[str, Any]) -> IRI: cluster_name = parse("$.clusterName").find(config)[0].value return IRI(self.CLUSTER_PREFIX, self.escape(cluster_name)) - def get_nodes(self) -> List[Dict[str, Any]]: - return list(parse("$.items").find(self.nodes)[0].value) - def write_node_reference(self, cluster_id: IRI, node: Dict[str, Any]) -> None: node_id = self.get_node_id(node) self.sink.add_relation(cluster_id, self.HAS_SUBRESOURCE, node_id) From 12faa97c4dc35b3e0d7c3d3a1383916802b9be83 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 13 Jun 2024 15:11:09 +0200 Subject: [PATCH 28/61] HHT-669: avoiding creating unnecessary property sets --- app/kg/inmemory_graph.py | 7 ++-- app/kg/iri.py | 12 +++---- app/kg/test_inmemory_graph.py | 33 +++++++++++++++++++ .../pod_metric_transformer.py | 5 ++- 4 files changed, 47 insertions(+), 10 deletions(-) diff --git a/app/kg/inmemory_graph.py b/app/kg/inmemory_graph.py index f075288..36091da 100644 --- a/app/kg/inmemory_graph.py +++ b/app/kg/inmemory_graph.py @@ -14,12 +14,13 @@ class GraphNode: def __init__(self, node_id: IRI): self.id = node_id - self.properties = {} - self.meta_properties = {} + self.properties = dict() + self.meta_properties = dict() def add_property(self, predicate: IRI, value: Literal) -> None: if predicate in self.properties: - self.add_property_collection(predicate, {value}) + if self.properties[predicate] != value: + self.add_property_collection(predicate, {value}) else: self.properties[predicate] = value diff --git a/app/kg/iri.py b/app/kg/iri.py index 0f0cb80..bf5dde8 100644 --- a/app/kg/iri.py +++ b/app/kg/iri.py @@ -13,7 +13,7 @@ def __init__(self, prefix: str, value: str): def __eq__(self, other: Any) -> bool: if not isinstance(other, IRI): - return NotImplemented + raise NotImplementedError if self is other: return True @@ -27,15 +27,15 @@ def __gt__(self, other: Any) -> bool: return True return False else: - return NotImplemented + raise NotImplementedError def __lt__(self, other: Any) -> bool: if isinstance(other, IRI): try: return not self.__gt__(other) and not self.__eq__(other) except TypeError: - return NotImplemented - return NotImplemented + raise 
NotImplementedError + raise NotImplementedError  def __le__(self, other: Any) -> bool: r = self.__lt__(other) @@ -44,7 +44,7 @@ def __le__(self, other: Any) -> bool: try: return self.__eq__(other) except TypeError: - return NotImplemented + raise NotImplementedError  def __ge__(self, other: Any) -> bool: r = self.__gt__(other) @@ -53,7 +53,7 @@ def __ge__(self, other: Any) -> bool: try: return self.__eq__(other) except TypeError: - return NotImplemented + raise NotImplementedError  def __hash__(self) -> int: res = 7 diff --git a/app/kg/test_inmemory_graph.py b/app/kg/test_inmemory_graph.py index 662f4b7..fa55edd 100644 --- a/app/kg/test_inmemory_graph.py +++ b/app/kg/test_inmemory_graph.py @@ -106,3 +106,36 @@ def test_equality(self): IRI("", "id1"), IRI("", "rel3"), Literal("1", Literal.TYPE_INT) ) self.assertEqual(graph1, graph2) + + def test_add_duplicates_should_not_create_sets(self): + graph = InMemoryGraph() + graph.add_relation(IRI("", "id1"), IRI("", "rel1"), IRI("", "id2")) + graph.add_relation(IRI("", "id1"), IRI("", "rel1"), IRI("", "id2")) + + graph.add_property( + IRI("", "id1"), IRI("", "rel2"), Literal("test1", Literal.TYPE_STRING) + ) + graph.add_property( + IRI("", "id1"), IRI("", "rel2"), Literal("test1", Literal.TYPE_STRING) + ) + graph.add_meta_property( + IRI("", "id1"), IRI("", "rel3"), Literal("test2", Literal.TYPE_STRING) + ) + graph.add_meta_property( + IRI("", "id1"), IRI("", "rel3"), Literal("test2", Literal.TYPE_STRING) + ) + + self.assertEqual( + graph.get_node_relations(IRI("", "id1")), + {IRI("", "rel1"): {IRI("", "id2")}}, + ) + + self.assertEqual( + graph.get_node_properties(IRI("", "id1")), + {IRI("", "rel2"): Literal("test1", Literal.TYPE_STRING)}, + ) + + self.assertEqual( + graph.get_node_meta_properties(IRI("", "id1")), + {IRI("", "rel3"): Literal("test2", Literal.TYPE_STRING)}, + ) diff --git a/app/metric_transform/pod_metric_transformer.py b/app/metric_transform/pod_metric_transformer.py index c47414c..ec146ec 100644 --- a/app/metric_transform/pod_metric_transformer.py +++ b/app/metric_transform/pod_metric_transformer.py @@ -18,7 +18,10 @@ def __init__(self, metrics: List[Tuple[MetricQuery, MetricValue]], sink: Graph): def transform(self, context: TransformationContext) -> None: for query, result in self.metrics: pod_id = self.get_pod_id(result.resource_id) - measurement_id = pod_id.dot(query.measurement_id) + parent_resource_id = ( + pod_id.dot(query.subresource) if query.subresource else pod_id + ) + measurement_id = parent_resource_id.dot(query.measurement_id) property_id = IRI(self.GLACIATION_PREFIX, query.property) unit_id = IRI(self.GLACIATION_PREFIX, query.unit) source_id = IRI(self.GLACIATION_PREFIX, query.source) From e96fad5efa53b0b6e883b251c6802e4be130d091 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 13 Jun 2024 15:28:17 +0200 Subject: [PATCH 29/61] HHT-669: KG Slice assembler and tests --- .../snapshot/minimal/metric_pods.yaml | 8 +- .../slice_glaciation-test-master01_80.jsonld | 237 +++++++++--------- .../snapshot/multinode/metric_pods.yaml | 32 +-- app/core/kg_builder.py | 2 +- app/core/slice_for_node_strategy.py | 7 +- app/core/test_kg_builder.py | 28 +-- app/core/test_snapshot_base.py | 40 ++- .../__fixture__/deployment.jsonld | 50 ++-- .../__fixture__/deployment.turtle | 136 +++++----- app/k8s_transform/__fixture__/pod1.jsonld | 2 +- app/k8s_transform/__fixture__/pod1.turtle | 6 +- app/k8s_transform/__fixture__/pod2.jsonld | 2 +- app/k8s_transform/__fixture__/pod2.turtle | 6 +- app/k8s_transform/__fixture__/pod3.jsonld | 2 +-
app/k8s_transform/__fixture__/pod3.turtle | 6 +- .../__fixture__/replicaset.jsonld | 79 +++--- .../__fixture__/replicaset.turtle | 52 ++-- .../__fixture__/statefulset.jsonld | 61 +++-- .../__fixture__/statefulset.turtle | 40 +-- app/k8s_transform/transformer_base.py | 8 +- 20 files changed, 405 insertions(+), 399 deletions(-) diff --git a/app/core/__fixture__/snapshot/minimal/metric_pods.yaml b/app/core/__fixture__/snapshot/minimal/metric_pods.yaml index 9b82833..c54a9a9 100644 --- a/app/core/__fixture__/snapshot/minimal/metric_pods.yaml +++ b/app/core/__fixture__/snapshot/minimal/metric_pods.yaml @@ -8,7 +8,7 @@ result_parser: SimpleResultParser value: metric_id: pod_cpu_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 41.0 - query: @@ -21,7 +21,7 @@ result_parser: SimpleResultParser value: metric_id: pod_ram_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 42.0 - query: @@ -34,7 +34,7 @@ result_parser: SimpleResultParser value: metric_id: pod_eph_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 43.0 - query: @@ -47,6 +47,6 @@ result_parser: SimpleResultParser value: metric_id: pod_net_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 43.0 diff --git a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld index 74faf27..e8097c6 100644 --- a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld @@ -6,55 +6,10 @@ "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ - { - "@id": "cluster:coredns-787d4945fb-l85r5", - "@type": "glc:WorkProducingResource", - "glc:hasID": "cluster:coredns-787d4945fb-l85r5", - "glc:hasMeasurement": { - "@id": "cluster:coredns-787d4945fb-l85r5.Usage", - "@type": "glc:Measurement", - "glc:hasDescription": { - "@set": [ - "Usage" - ] - }, - "glc:hasTimestamp": { - "@set": [ - 17100500 - ] - }, - "glc:hasValue": { - "@set": [ - 41.0, - 42.0, - 43.0 - ] - }, - "glc:hasID": "cluster:coredns-787d4945fb-l85r5.Usage", - "glc:measuredIn": { - "@set": [ - "glc:bytes", - "glc:coreseconds" - ] - }, - "glc:relatesToMeasurementProperty": { - "@set": [ - "glc:CPU.Usage", - "glc:Network.Usage", - "glc:RAM.Usage", - "glc:Storage.Usage" - ] - } - } - }, { "@id": "cluster:glaciation-test-master01", "@type": "glc:WorkProducingResource", - "glc:hasDescription": { - "@set": [ - "KubernetesWorkerNode" - ] - }, + "glc:hasDescription": "KubernetesWorkerNode", "glc:hasID": "cluster:glaciation-test-master01", "glc:hasMeasurement": { "@id": "cluster:glaciation-test-master01.Energy.Index", @@ -202,6 +157,50 @@ "k8s:hasName": "coredns-787d4945fb-l85r5", "glc:consumes": "cluster:glaciation-test-master01", "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 41.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + }, + { + "@id": 
"cluster:kube-system.coredns-787d4945fb-l85r5.Network.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.Network.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + }, "glc:hasSubResource": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" }, { @@ -310,11 +309,14 @@ "glc:hasID": "glc:cAdvisor", "glc:makes": { "@set": [ - "cluster:coredns-787d4945fb-l85r5.Usage", "cluster:glaciation-test-master01.CPU.Usage", "cluster:glaciation-test-master01.Network.Usage", "cluster:glaciation-test-master01.RAM.Usage", - "cluster:glaciation-test-master01.Storage.Usage" + "cluster:glaciation-test-master01.Storage.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.CPU.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.Network.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.RAM.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.Storage.Usage" ] } }, @@ -324,159 +326,148 @@ "glc:hasID": "glc:coreseconds" }, { - "@id": "cluster:coredns-787d4945fb.1", - "@type": "glc:AssignedTask", - "glc:hasDescription": "ReplicaSet", - "glc:hasConstraint": { - "@set": [ - { - "@id": "cluster:coredns-787d4945fb.1.CPU.Allocated", - "@type": "glc:SoftConstraint", - "glc:hasDescription": "CPU.Allocated", - "glc:maxValue": 0.1, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns-787d4945fb.1.CPU.Allocated", - "glc:measuredIn": "glc:Core" - }, - { - "@id": "cluster:coredns-787d4945fb.1.RAM.Allocated", - "@type": "glc:SoftConstraint", - "glc:hasDescription": "RAM.Allocated", - "glc:maxValue": 73400320.0, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns-787d4945fb.1.RAM.Allocated", - "glc:measuredIn": "glc:Bytes" - }, - { - "@id": "cluster:coredns-787d4945fb.1.RAM.Capacity", - "@type": "glc:HardConstraint", - "glc:hasDescription": "RAM.Capacity", - "glc:maxValue": 178257920.0, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns-787d4945fb.1.RAM.Capacity", - "glc:measuredIn": "glc:Bytes" - } - ] - }, - "glc:hasID": "cluster:coredns-787d4945fb.1" - }, - { - "@id": "cluster:coredns-787d4945fb.None", - "@type": "glc:AssignedTask", - "glc:hasDescription": "ReplicaSet", - "glc:makes": "cluster:kube-system.coredns-787d4945fb-l85r5" - }, - { - "@id": "cluster:coredns.1", + "@id": "cluster:coredns", "@type": "glc:AssignedTask", "glc:hasDescription": "Deployment", "glc:hasConstraint": { "@set": [ { - "@id": "cluster:coredns.1.CPU.Allocated", + "@id": "cluster:coredns.CPU.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "CPU.Allocated", "glc:maxValue": 0.30000000000000004, "glc:hasAspect": "glc:Performance", - "glc:hasID": 
"cluster:coredns.1.CPU.Allocated", + "glc:hasID": "cluster:coredns.CPU.Allocated", "glc:measuredIn": "glc:Core" }, { - "@id": "cluster:coredns.1.Energy.Allocated", + "@id": "cluster:coredns.Energy.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "Energy.Allocated", "glc:maxValue": 100.0, "glc:hasAspect": "glc:Power", - "glc:hasID": "cluster:coredns.1.Energy.Allocated", + "glc:hasID": "cluster:coredns.Energy.Allocated", "glc:measuredIn": "glc:Milliwatt" }, { - "@id": "cluster:coredns.1.Energy.Capacity", + "@id": "cluster:coredns.Energy.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "Energy.Capacity", "glc:maxValue": 100.0, "glc:hasAspect": "glc:Power", - "glc:hasID": "cluster:coredns.1.Energy.Capacity", + "glc:hasID": "cluster:coredns.Energy.Capacity", "glc:measuredIn": "glc:Milliwatt" }, { - "@id": "cluster:coredns.1.GPU.Allocated", + "@id": "cluster:coredns.GPU.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "GPU.Allocated", "glc:maxValue": 101.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.GPU.Allocated", + "glc:hasID": "cluster:coredns.GPU.Allocated", "glc:measuredIn": "glc:Core" }, { - "@id": "cluster:coredns.1.GPU.Capacity", + "@id": "cluster:coredns.GPU.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "GPU.Capacity", "glc:maxValue": 101.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.GPU.Capacity", + "glc:hasID": "cluster:coredns.GPU.Capacity", "glc:measuredIn": "glc:Core" }, { - "@id": "cluster:coredns.1.Network.Allocated", + "@id": "cluster:coredns.Network.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "Network.Allocated", "glc:maxValue": 1010.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.Network.Allocated", + "glc:hasID": "cluster:coredns.Network.Allocated", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.1.Network.Capacity", + "@id": "cluster:coredns.Network.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "Network.Capacity", "glc:maxValue": 1010.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.Network.Capacity", + "glc:hasID": "cluster:coredns.Network.Capacity", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.1.RAM.Allocated", + "@id": "cluster:coredns.RAM.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "RAM.Allocated", "glc:maxValue": 220200960.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.RAM.Allocated", + "glc:hasID": "cluster:coredns.RAM.Allocated", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.1.RAM.Capacity", + "@id": "cluster:coredns.RAM.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "RAM.Capacity", "glc:maxValue": 534773760.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.RAM.Capacity", + "glc:hasID": "cluster:coredns.RAM.Capacity", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.1.Storage.Allocated", + "@id": "cluster:coredns.Storage.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "Storage.Allocated", "glc:maxValue": 100500.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.1.Storage.Allocated", + "glc:hasID": "cluster:coredns.Storage.Allocated", "glc:measuredIn": "glc:Bytes" } ] }, - "glc:hasID": "cluster:coredns.1" - }, - { - "@id": "cluster:coredns.None", - "@type": "glc:AssignedTask", - "glc:hasDescription": "Deployment", - "glc:hasSubTask": "cluster:coredns-787d4945fb.1" + "glc:hasID": 
"cluster:coredns", + "glc:hasSubTask": { + "@id": "cluster:coredns-787d4945fb", + "@type": "glc:AssignedTask", + "glc:hasDescription": "ReplicaSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:coredns-787d4945fb.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.1, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns-787d4945fb.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 73400320.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns-787d4945fb.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 178257920.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:coredns-787d4945fb", + "glc:makes": "cluster:kube-system.coredns-787d4945fb-l85r5" + } }, { - "@id": "cluster:crd-resource.None", + "@id": "cluster:crd-resource", "@type": "glc:AssignedTask", "glc:hasDescription": "CRD", - "glc:makes": "cluster:coredns.1" + "glc:makes": "cluster:coredns" }, { "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.Status", @@ -497,8 +488,8 @@ "@type": "glc:Scheduler", "glc:assigns": { "@set": [ - "cluster:coredns-787d4945fb.1", - "cluster:coredns.1" + "cluster:coredns", + "cluster:coredns-787d4945fb" ] }, "glc:hasID": "glc:default-scheduler", diff --git a/app/core/__fixture__/snapshot/multinode/metric_pods.yaml b/app/core/__fixture__/snapshot/multinode/metric_pods.yaml index bc3352d..52eaaf2 100644 --- a/app/core/__fixture__/snapshot/multinode/metric_pods.yaml +++ b/app/core/__fixture__/snapshot/multinode/metric_pods.yaml @@ -9,7 +9,7 @@ result_parser: SimpleResultParser value: metric_id: pod_cpu_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 41.0 - query: @@ -22,7 +22,7 @@ result_parser: SimpleResultParser value: metric_id: pod_ram_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 42.0 - query: @@ -35,7 +35,7 @@ result_parser: SimpleResultParser value: metric_id: pod_eph_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 43.0 - query: @@ -48,7 +48,7 @@ result_parser: SimpleResultParser value: metric_id: pod_net_usage - resource_id: coredns-787d4945fb-l85r5 + resource_id: kube-system.coredns-787d4945fb-l85r5 timestamp: 17100500 value: 44.0 # kube-flannel-ds-848v8 @@ -62,7 +62,7 @@ result_parser: SimpleResultParser value: metric_id: pod_cpu_usage - resource_id: kube-flannel-ds-848v8 + resource_id: kube-flannel.kube-flannel-ds-848v8 timestamp: 17100500 value: 51.0 - query: @@ -75,7 +75,7 @@ result_parser: SimpleResultParser value: metric_id: pod_ram_usage - resource_id: kube-flannel-ds-848v8 + resource_id: kube-flannel.kube-flannel-ds-848v8 timestamp: 17100500 value: 52.0 - query: @@ -88,7 +88,7 @@ result_parser: SimpleResultParser value: metric_id: pod_eph_usage - resource_id: kube-flannel-ds-848v8 + resource_id: kube-flannel.kube-flannel-ds-848v8 timestamp: 17100500 value: 53.0 - query: @@ -101,7 +101,7 @@ result_parser: SimpleResultParser value: metric_id: 
pod_net_usage - resource_id: kube-flannel-ds-848v8 + resource_id: kube-flannel.kube-flannel-ds-848v8 timestamp: 17100500 value: 54.0 # init-vault-cluster-cbqhq @@ -115,7 +115,7 @@ result_parser: SimpleResultParser value: metric_id: pod_cpu_usage - resource_id: init-vault-cluster-cbqhq + resource_id: vault.init-vault-cluster-cbqhq timestamp: 17100500 value: 61.0 - query: @@ -128,7 +128,7 @@ result_parser: SimpleResultParser value: metric_id: pod_ram_usage - resource_id: init-vault-cluster-cbqhq + resource_id: vault.init-vault-cluster-cbqhq timestamp: 17100500 value: 62.0 - query: @@ -141,7 +141,7 @@ result_parser: SimpleResultParser value: metric_id: pod_eph_usage - resource_id: init-vault-cluster-cbqhq + resource_id: vault.init-vault-cluster-cbqhq timestamp: 17100500 value: 63.0 - query: @@ -154,7 +154,7 @@ result_parser: SimpleResultParser value: metric_id: pod_net_usage - resource_id: init-vault-cluster-cbqhq + resource_id: vault.init-vault-cluster-cbqhq timestamp: 17100500 value: 64.0 # glaciation-pool-0-0 @@ -168,7 +168,7 @@ result_parser: SimpleResultParser value: metric_id: pod_cpu_usage - resource_id: glaciation-pool-0-0 + resource_id: minio-tenant.glaciation-pool-0-0 timestamp: 17100500 value: 71.0 - query: @@ -181,7 +181,7 @@ result_parser: SimpleResultParser value: metric_id: pod_ram_usage - resource_id: glaciation-pool-0-0 + resource_id: minio-tenant.glaciation-pool-0-0 timestamp: 17100500 value: 72.0 - query: @@ -194,7 +194,7 @@ result_parser: SimpleResultParser value: metric_id: pod_eph_usage - resource_id: glaciation-pool-0-0 + resource_id: minio-tenant.glaciation-pool-0-0 timestamp: 17100500 value: 73.0 - query: @@ -207,6 +207,6 @@ result_parser: SimpleResultParser value: metric_id: pod_net_usage - resource_id: glaciation-pool-0-0 + resource_id: minio-tenant.glaciation-pool-0-0 timestamp: 17100500 value: 74.0 diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 7c87138..fd016f3 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -57,7 +57,7 @@ def __init__( async def run(self) -> None: while self.running.is_set(): - now = 0 + now = 1 ( cluster_snapshot, pod_metrics, diff --git a/app/core/slice_for_node_strategy.py b/app/core/slice_for_node_strategy.py index 72c9789..47b5afd 100644 --- a/app/core/slice_for_node_strategy.py +++ b/app/core/slice_for_node_strategy.py @@ -108,6 +108,11 @@ def add_metrics( slice_metrics.node_metrics.extend(metrics) for pod in slice_resources.pods: - pod_name = self.get_resource_name(pod) + pod_name = self.get_pod_name(pod) metrics = src_metrics.get_pod_metrics_by_resource(pod_name) slice_metrics.pod_metrics.extend(metrics) + + def get_pod_name(self, resource: Dict[str, Any]) -> str: + name = parse("$.metadata.name").find(resource)[0].value + namespace = parse("$.metadata.namespace").find(resource)[0].value + return f"{namespace}.{name}" diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 186c2ce..39394d0 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -27,6 +27,7 @@ class KGBuilderTest(TestCase, TestGraphFixture, SnapshotTestBase): settings: KGBuilderSettings def setUp(self) -> None: + self.maxDiff = None self.client = MockMetadataServiceClient() self.influxdb_client = MockInfluxDBClient() self.queue = AsyncQueue() @@ -39,36 +40,19 @@ def setUp(self) -> None: ) def test_build_minimal(self) -> None: - self.mock_minimal_inputs() + self.mock_inputs( + "minimal", self.k8s_client, self.influxdb_client, self.settings.queries + ) builder = self.create_builder() 
self.runner.run(self.run_builder(builder)) slice = self.wait_for_slice(2) - self.assertEqual(slice.timestamp, 0) + self.assertEqual(slice.timestamp, 1) self.assertEqual(slice.slice_id, KGSliceId("glaciation-test-master01", 80)) self.assertNotEqual(slice.graph, InMemoryGraph()) - - # TODO validate graph nodes and edges - - def mock_minimal_inputs(self) -> None: - resources = self.load_k8s_snapshot("minimal") - - self.k8s_client.mock_cluster(resources.cluster) - self.k8s_client.mock_daemonsets(resources.daemonsets) - self.k8s_client.mock_deployments(resources.deployments) - self.k8s_client.mock_jobs(resources.jobs) - self.k8s_client.mock_nodes(resources.nodes) - self.k8s_client.mock_pods(resources.pods) - self.k8s_client.mock_replicasets(resources.replicasets) - self.k8s_client.mock_statefullsets(resources.statefullsets) - - metrics = self.load_metric_snapshot("minimal") - for query, value in metrics.node_metrics: - self.influxdb_client.mock_query(query.query, [value]) - for query, value in metrics.pod_metrics: - self.influxdb_client.mock_query(query.query, [value]) + self.assert_graph(slice.graph, "minimal", slice.slice_id) def create_builder(self) -> KGBuilder: repository = KGRepository(self.client, self.get_jsonld_config()) diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 19f71a7..7bd7df3 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -7,8 +7,11 @@ import yaml +from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient from app.clients.k8s.k8s_client import ResourceSnapshot -from app.core.metric_repository import MetricQuery +from app.clients.k8s.mock_k8s_client import MockK8SClient +from app.core.kg_builder import QuerySettings +from app.core.metric_repository import MetricQuery, ResultParserId from app.core.metric_value import MetricValue from app.core.types import KGSliceId, MetricSnapshot, SliceInputs from app.k8s_transform.upper_ontology_base import UpperOntologyBase @@ -22,6 +25,33 @@ class SnapshotTestBase: SNAPSHOT_ROOT: str = "app/core/__fixture__/snapshot" + def mock_inputs( + self, + identity: str, + k8s_client: MockK8SClient, + influxdb_client: MockInfluxDBClient, + settings: QuerySettings, + ) -> None: + resources = self.load_k8s_snapshot(identity) + + k8s_client.mock_cluster(resources.cluster) + k8s_client.mock_daemonsets(resources.daemonsets) + k8s_client.mock_deployments(resources.deployments) + k8s_client.mock_jobs(resources.jobs) + k8s_client.mock_nodes(resources.nodes) + k8s_client.mock_pods(resources.pods) + k8s_client.mock_replicasets(resources.replicasets) + k8s_client.mock_statefullsets(resources.statefullsets) + + metrics = self.load_metric_snapshot(identity) + for query, value in metrics.node_metrics: + influxdb_client.mock_query(query.query, [value]) + settings.node_queries.append(query) + + for query, value in metrics.pod_metrics: + influxdb_client.mock_query(query.query, [value]) + settings.pod_queries.append(query) + def get_inputs(self, identity: str) -> SliceInputs: resource_snapshot = self.load_k8s_snapshot(identity) metric_snapshot = self.load_metric_snapshot(identity) @@ -62,6 +92,7 @@ def load_metrics( result = [] for query_and_value in query_and_values: query = self.dataclass_from_dict(MetricQuery, query_and_value["query"]) + query.result_parser = ResultParserId.SIMPLE_RESULT_PARSER # TODO parse value = self.dataclass_from_dict(MetricValue, query_and_value["value"]) result.append((query, value)) return result @@ -70,7 +101,7 @@ def safe_load_yaml(self, file_path: str) -> 
Any: with FileIO(file_path) as f: return yaml.safe_load(f) - def load_json(self, file_path: str) -> Dict[str, Any]: + def load_jsonld(self, file_path: str) -> Dict[str, Any]: with FileIO(file_path) as f: return json.load(f) # type: ignore @@ -84,12 +115,11 @@ def dataclass_from_dict(self, klass, d): return d # Not a dataclass field def assert_graph(self, graph: Graph, snapshot_id: str, slice_id: KGSliceId) -> None: - file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/graph_{slice_id.node_ip}_{slice_id.port}.jsonld" - node_jsonld = self.load_json(file_path) + file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/slice_{slice_id.node_ip}_{slice_id.port}.jsonld" + node_jsonld = self.load_jsonld(file_path) buffer = StringIO() JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) - self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) # type: ignore def get_jsonld_config(self) -> JsonLDConfiguration: diff --git a/app/k8s_transform/__fixture__/deployment.jsonld b/app/k8s_transform/__fixture__/deployment.jsonld index cb878ea..9c493d6 100644 --- a/app/k8s_transform/__fixture__/deployment.jsonld +++ b/app/k8s_transform/__fixture__/deployment.jsonld @@ -32,115 +32,115 @@ "glc:hasID": "glc:Power" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb", + "@id": "cluster:coredns", "@type": "glc:AssignedTask", "glc:hasDescription": "Deployment", "glc:hasConstraint": { "@set": [ { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated", + "@id": "cluster:coredns.CPU.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "CPU.Allocated", "glc:maxValue": 0.30000000000000004, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated", + "glc:hasID": "cluster:coredns.CPU.Allocated", "glc:measuredIn": "glc:Core" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated", + "@id": "cluster:coredns.Energy.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "Energy.Allocated", "glc:maxValue": 100.0, "glc:hasAspect": "glc:Power", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated", + "glc:hasID": "cluster:coredns.Energy.Allocated", "glc:measuredIn": "glc:Milliwatt" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity", + "@id": "cluster:coredns.Energy.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "Energy.Capacity", "glc:maxValue": 100.0, "glc:hasAspect": "glc:Power", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity", + "glc:hasID": "cluster:coredns.Energy.Capacity", "glc:measuredIn": "glc:Milliwatt" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated", + "@id": "cluster:coredns.GPU.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "GPU.Allocated", "glc:maxValue": 101.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated", + "glc:hasID": "cluster:coredns.GPU.Allocated", "glc:measuredIn": "glc:Core" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity", + "@id": "cluster:coredns.GPU.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "GPU.Capacity", "glc:maxValue": 101.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity", + "glc:hasID": "cluster:coredns.GPU.Capacity", "glc:measuredIn": "glc:Core" }, { - "@id": 
"cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated", + "@id": "cluster:coredns.Network.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "Network.Allocated", "glc:maxValue": 1010.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated", + "glc:hasID": "cluster:coredns.Network.Allocated", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity", + "@id": "cluster:coredns.Network.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "Network.Capacity", "glc:maxValue": 1010.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity", + "glc:hasID": "cluster:coredns.Network.Capacity", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated", + "@id": "cluster:coredns.RAM.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "RAM.Allocated", "glc:maxValue": 220200960.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated", + "glc:hasID": "cluster:coredns.RAM.Allocated", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity", + "@id": "cluster:coredns.RAM.Capacity", "@type": "glc:HardConstraint", "glc:hasDescription": "RAM.Capacity", "glc:maxValue": 534773760.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity", + "glc:hasID": "cluster:coredns.RAM.Capacity", "glc:measuredIn": "glc:Bytes" }, { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated", + "@id": "cluster:coredns.Storage.Allocated", "@type": "glc:SoftConstraint", "glc:hasDescription": "Storage.Allocated", "glc:maxValue": 100500.0, "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated", + "glc:hasID": "cluster:coredns.Storage.Allocated", "glc:measuredIn": "glc:Bytes" } ] }, - "glc:hasID": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb" + "glc:hasID": "cluster:coredns" }, { - "@id": "cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd", + "@id": "cluster:crd-resource", "@type": "glc:AssignedTask", "glc:hasDescription": "CRD", - "glc:makes": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb" + "glc:makes": "cluster:coredns" }, { "@id": "glc:default-scheduler", "@type": "glc:Scheduler", - "glc:assigns": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb", + "glc:assigns": "cluster:coredns", "glc:hasID": "glc:default-scheduler" } ] diff --git a/app/k8s_transform/__fixture__/deployment.turtle b/app/k8s_transform/__fixture__/deployment.turtle index b1317cf..08f2ea1 100644 --- a/app/k8s_transform/__fixture__/deployment.turtle +++ b/app/k8s_transform/__fixture__/deployment.turtle @@ -1,70 +1,70 @@ -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb rdf:type glc:AssignedTask . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb glc:hasDescription "Deployment" . 
-cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb glc:hasConstraint (cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated) . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated rdf:type glc:SoftConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated glc:hasDescription "CPU.Allocated" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated glc:maxValue 0.30000000000000004^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.CPU.Allocated glc:measuredIn glc:Core . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated rdf:type glc:SoftConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated glc:hasDescription "Energy.Allocated" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated glc:maxValue 100.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated glc:hasAspect glc:Power . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Allocated glc:measuredIn glc:Milliwatt . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity rdf:type glc:HardConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity glc:hasDescription "Energy.Capacity" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity glc:maxValue 100.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity glc:hasAspect glc:Power . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Energy.Capacity glc:measuredIn glc:Milliwatt . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated rdf:type glc:SoftConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated glc:hasDescription "GPU.Allocated" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated glc:maxValue 101.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Allocated glc:measuredIn glc:Core . 
-cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity rdf:type glc:HardConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity glc:hasDescription "GPU.Capacity" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity glc:maxValue 101.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.GPU.Capacity glc:measuredIn glc:Core . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated rdf:type glc:SoftConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated glc:hasDescription "Network.Allocated" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated glc:maxValue 1010.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Allocated glc:measuredIn glc:Bytes . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity rdf:type glc:HardConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity glc:hasDescription "Network.Capacity" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity glc:maxValue 1010.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Network.Capacity glc:measuredIn glc:Bytes . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated rdf:type glc:SoftConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated glc:hasDescription "RAM.Allocated" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated glc:maxValue 220200960.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Allocated glc:measuredIn glc:Bytes . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity rdf:type glc:HardConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity glc:hasDescription "RAM.Capacity" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity glc:maxValue 534773760.0^^ . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.RAM.Capacity glc:measuredIn glc:Bytes . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated rdf:type glc:SoftConstraint . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated glc:hasDescription "Storage.Allocated" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated glc:maxValue 100500.0^^ . 
-cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated glc:hasAspect glc:Performance . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated glc:hasID cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb.Storage.Allocated glc:measuredIn glc:Bytes . -cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd rdf:type glc:AssignedTask . -cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd glc:hasDescription "CRD" . -cluster:crd-resource.930f2fc9-2b5e-46c7-a11f-4374fe55fafd glc:makes cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb . +cluster:coredns rdf:type glc:AssignedTask . +cluster:coredns glc:hasDescription "Deployment" . +cluster:coredns glc:hasConstraint (cluster:coredns.CPU.Allocated cluster:coredns.Energy.Allocated cluster:coredns.Energy.Capacity cluster:coredns.GPU.Allocated cluster:coredns.GPU.Capacity cluster:coredns.Network.Allocated cluster:coredns.Network.Capacity cluster:coredns.RAM.Allocated cluster:coredns.RAM.Capacity cluster:coredns.Storage.Allocated) . +cluster:coredns glc:hasID cluster:coredns . +cluster:coredns.CPU.Allocated rdf:type glc:SoftConstraint . +cluster:coredns.CPU.Allocated glc:hasDescription "CPU.Allocated" . +cluster:coredns.CPU.Allocated glc:maxValue 0.30000000000000004^^ . +cluster:coredns.CPU.Allocated glc:hasAspect glc:Performance . +cluster:coredns.CPU.Allocated glc:hasID cluster:coredns.CPU.Allocated . +cluster:coredns.CPU.Allocated glc:measuredIn glc:Core . +cluster:coredns.Energy.Allocated rdf:type glc:SoftConstraint . +cluster:coredns.Energy.Allocated glc:hasDescription "Energy.Allocated" . +cluster:coredns.Energy.Allocated glc:maxValue 100.0^^ . +cluster:coredns.Energy.Allocated glc:hasAspect glc:Power . +cluster:coredns.Energy.Allocated glc:hasID cluster:coredns.Energy.Allocated . +cluster:coredns.Energy.Allocated glc:measuredIn glc:Milliwatt . +cluster:coredns.Energy.Capacity rdf:type glc:HardConstraint . +cluster:coredns.Energy.Capacity glc:hasDescription "Energy.Capacity" . +cluster:coredns.Energy.Capacity glc:maxValue 100.0^^ . +cluster:coredns.Energy.Capacity glc:hasAspect glc:Power . +cluster:coredns.Energy.Capacity glc:hasID cluster:coredns.Energy.Capacity . +cluster:coredns.Energy.Capacity glc:measuredIn glc:Milliwatt . +cluster:coredns.GPU.Allocated rdf:type glc:SoftConstraint . +cluster:coredns.GPU.Allocated glc:hasDescription "GPU.Allocated" . +cluster:coredns.GPU.Allocated glc:maxValue 101.0^^ . +cluster:coredns.GPU.Allocated glc:hasAspect glc:Performance . +cluster:coredns.GPU.Allocated glc:hasID cluster:coredns.GPU.Allocated . +cluster:coredns.GPU.Allocated glc:measuredIn glc:Core . +cluster:coredns.GPU.Capacity rdf:type glc:HardConstraint . +cluster:coredns.GPU.Capacity glc:hasDescription "GPU.Capacity" . +cluster:coredns.GPU.Capacity glc:maxValue 101.0^^ . +cluster:coredns.GPU.Capacity glc:hasAspect glc:Performance . +cluster:coredns.GPU.Capacity glc:hasID cluster:coredns.GPU.Capacity . +cluster:coredns.GPU.Capacity glc:measuredIn glc:Core . +cluster:coredns.Network.Allocated rdf:type glc:SoftConstraint . +cluster:coredns.Network.Allocated glc:hasDescription "Network.Allocated" . +cluster:coredns.Network.Allocated glc:maxValue 1010.0^^ . +cluster:coredns.Network.Allocated glc:hasAspect glc:Performance . +cluster:coredns.Network.Allocated glc:hasID cluster:coredns.Network.Allocated . +cluster:coredns.Network.Allocated glc:measuredIn glc:Bytes . +cluster:coredns.Network.Capacity rdf:type glc:HardConstraint . 
+cluster:coredns.Network.Capacity glc:hasDescription "Network.Capacity" . +cluster:coredns.Network.Capacity glc:maxValue 1010.0^^ . +cluster:coredns.Network.Capacity glc:hasAspect glc:Performance . +cluster:coredns.Network.Capacity glc:hasID cluster:coredns.Network.Capacity . +cluster:coredns.Network.Capacity glc:measuredIn glc:Bytes . +cluster:coredns.RAM.Allocated rdf:type glc:SoftConstraint . +cluster:coredns.RAM.Allocated glc:hasDescription "RAM.Allocated" . +cluster:coredns.RAM.Allocated glc:maxValue 220200960.0^^ . +cluster:coredns.RAM.Allocated glc:hasAspect glc:Performance . +cluster:coredns.RAM.Allocated glc:hasID cluster:coredns.RAM.Allocated . +cluster:coredns.RAM.Allocated glc:measuredIn glc:Bytes . +cluster:coredns.RAM.Capacity rdf:type glc:HardConstraint . +cluster:coredns.RAM.Capacity glc:hasDescription "RAM.Capacity" . +cluster:coredns.RAM.Capacity glc:maxValue 534773760.0^^ . +cluster:coredns.RAM.Capacity glc:hasAspect glc:Performance . +cluster:coredns.RAM.Capacity glc:hasID cluster:coredns.RAM.Capacity . +cluster:coredns.RAM.Capacity glc:measuredIn glc:Bytes . +cluster:coredns.Storage.Allocated rdf:type glc:SoftConstraint . +cluster:coredns.Storage.Allocated glc:hasDescription "Storage.Allocated" . +cluster:coredns.Storage.Allocated glc:maxValue 100500.0^^ . +cluster:coredns.Storage.Allocated glc:hasAspect glc:Performance . +cluster:coredns.Storage.Allocated glc:hasID cluster:coredns.Storage.Allocated . +cluster:coredns.Storage.Allocated glc:measuredIn glc:Bytes . +cluster:crd-resource rdf:type glc:AssignedTask . +cluster:crd-resource glc:hasDescription "CRD" . +cluster:crd-resource glc:makes cluster:coredns . glc:Bytes rdf:type glc:MeasurementUnit . glc:Bytes glc:hasID glc:Bytes . glc:Core rdf:type glc:MeasurementUnit . @@ -76,5 +76,5 @@ glc:Performance glc:hasID glc:Performance . glc:Power rdf:type glc:Aspect . glc:Power glc:hasID glc:Power . glc:default-scheduler rdf:type glc:Scheduler . -glc:default-scheduler glc:assigns cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb . +glc:default-scheduler glc:assigns cluster:coredns . glc:default-scheduler glc:hasID glc:default-scheduler . diff --git a/app/k8s_transform/__fixture__/pod1.jsonld b/app/k8s_transform/__fixture__/pod1.jsonld index e60dd3c..193d468 100644 --- a/app/k8s_transform/__fixture__/pod1.jsonld +++ b/app/k8s_transform/__fixture__/pod1.jsonld @@ -30,7 +30,7 @@ "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" }, { - "@id": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685", + "@id": "cluster:coredns-787d4945fb", "@type": "glc:AssignedTask", "glc:hasDescription": "ReplicaSet", "glc:makes": "cluster:kube-system.coredns-787d4945fb-l85r5" diff --git a/app/k8s_transform/__fixture__/pod1.turtle b/app/k8s_transform/__fixture__/pod1.turtle index 63d18ad..1b53b3b 100644 --- a/app/k8s_transform/__fixture__/pod1.turtle +++ b/app/k8s_transform/__fixture__/pod1.turtle @@ -1,6 +1,6 @@ -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 rdf:type glc:AssignedTask . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 glc:hasDescription "ReplicaSet" . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 glc:makes cluster:kube-system.coredns-787d4945fb-l85r5 . +cluster:coredns-787d4945fb rdf:type glc:AssignedTask . +cluster:coredns-787d4945fb glc:hasDescription "ReplicaSet" . +cluster:coredns-787d4945fb glc:makes cluster:kube-system.coredns-787d4945fb-l85r5 . cluster:glaciation-test-master01 rdf:type glc:WorkProducingResource . 
cluster:glaciation-test-master01 glc:hasDescription "KubernetesWorkerNode" . cluster:glaciation-test-master01 glc:hasID cluster:glaciation-test-master01 . diff --git a/app/k8s_transform/__fixture__/pod2.jsonld b/app/k8s_transform/__fixture__/pod2.jsonld index 8fbdc54..f6b35eb 100644 --- a/app/k8s_transform/__fixture__/pod2.jsonld +++ b/app/k8s_transform/__fixture__/pod2.jsonld @@ -14,7 +14,7 @@ "glc:hasID": "cluster:tenant1.tenant1-pool-0-1" }, { - "@id": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2", + "@id": "cluster:tenant1-pool-0", "@type": "glc:AssignedTask", "glc:hasDescription": "StatefulSet", "glc:makes": "cluster:tenant1.tenant1-pool-0-1" diff --git a/app/k8s_transform/__fixture__/pod2.turtle b/app/k8s_transform/__fixture__/pod2.turtle index 71b96a3..d004b0c 100644 --- a/app/k8s_transform/__fixture__/pod2.turtle +++ b/app/k8s_transform/__fixture__/pod2.turtle @@ -1,6 +1,6 @@ -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 rdf:type glc:AssignedTask . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 glc:hasDescription "StatefulSet" . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 glc:makes cluster:tenant1.tenant1-pool-0-1 . +cluster:tenant1-pool-0 rdf:type glc:AssignedTask . +cluster:tenant1-pool-0 glc:hasDescription "StatefulSet" . +cluster:tenant1-pool-0 glc:makes cluster:tenant1.tenant1-pool-0-1 . cluster:tenant1.tenant1-pool-0-1 rdf:type glc:WorkProducingResource . cluster:tenant1.tenant1-pool-0-1 glc:hasDescription "Pod" . cluster:tenant1.tenant1-pool-0-1 k8s:hasName "tenant1-pool-0-1" . diff --git a/app/k8s_transform/__fixture__/pod3.jsonld b/app/k8s_transform/__fixture__/pod3.jsonld index 1c6ce37..d931509 100644 --- a/app/k8s_transform/__fixture__/pod3.jsonld +++ b/app/k8s_transform/__fixture__/pod3.jsonld @@ -52,7 +52,7 @@ "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel" }, { - "@id": "cluster:kube-flannel-ds.84cf7062-bb28-49c9-89b6-5bb34014f178", + "@id": "cluster:kube-flannel-ds", "@type": "glc:AssignedTask", "glc:hasDescription": "DaemonSet", "glc:makes": "cluster:kube-flannel.kube-flannel-ds-848v8" diff --git a/app/k8s_transform/__fixture__/pod3.turtle b/app/k8s_transform/__fixture__/pod3.turtle index 7b361d0..bdcb4d6 100644 --- a/app/k8s_transform/__fixture__/pod3.turtle +++ b/app/k8s_transform/__fixture__/pod3.turtle @@ -1,9 +1,9 @@ cluster:glaciation-worker04 rdf:type glc:WorkProducingResource . cluster:glaciation-worker04 glc:hasDescription "KubernetesWorkerNode" . cluster:glaciation-worker04 glc:hasID cluster:glaciation-worker04 . -cluster:kube-flannel-ds.84cf7062-bb28-49c9-89b6-5bb34014f178 rdf:type glc:AssignedTask . -cluster:kube-flannel-ds.84cf7062-bb28-49c9-89b6-5bb34014f178 glc:hasDescription "DaemonSet" . -cluster:kube-flannel-ds.84cf7062-bb28-49c9-89b6-5bb34014f178 glc:makes cluster:kube-flannel.kube-flannel-ds-848v8 . +cluster:kube-flannel-ds rdf:type glc:AssignedTask . +cluster:kube-flannel-ds glc:hasDescription "DaemonSet" . +cluster:kube-flannel-ds glc:makes cluster:kube-flannel.kube-flannel-ds-848v8 . cluster:kube-flannel.kube-flannel-ds-848v8 rdf:type glc:WorkProducingResource . cluster:kube-flannel.kube-flannel-ds-848v8 glc:hasDescription "Pod" . cluster:kube-flannel.kube-flannel-ds-848v8 k8s:hasName "kube-flannel-ds-848v8" . 
diff --git a/app/k8s_transform/__fixture__/replicaset.jsonld b/app/k8s_transform/__fixture__/replicaset.jsonld index e0238ee..cf8c7e0 100644 --- a/app/k8s_transform/__fixture__/replicaset.jsonld +++ b/app/k8s_transform/__fixture__/replicaset.jsonld @@ -22,52 +22,51 @@ "glc:hasID": "glc:Performance" }, { - "@id": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685", - "@type": "glc:AssignedTask", - "glc:hasDescription": "ReplicaSet", - "glc:hasConstraint": { - "@set": [ - { - "@id": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated", - "@type": "glc:SoftConstraint", - "glc:hasDescription": "CPU.Allocated", - "glc:maxValue": 0.1, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated", - "glc:measuredIn": "glc:Core" - }, - { - "@id": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated", - "@type": "glc:SoftConstraint", - "glc:hasDescription": "RAM.Allocated", - "glc:maxValue": 73400320.0, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated", - "glc:measuredIn": "glc:Bytes" - }, - { - "@id": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity", - "@type": "glc:HardConstraint", - "glc:hasDescription": "RAM.Capacity", - "glc:maxValue": 178257920.0, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity", - "glc:measuredIn": "glc:Bytes" - } - ] - }, - "glc:hasID": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685" - }, - { - "@id": "cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb", + "@id": "cluster:coredns", "@type": "glc:AssignedTask", "glc:hasDescription": "Deployment", - "glc:hasSubTask": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685" + "glc:hasSubTask": { + "@id": "cluster:coredns-787d4945fb", + "@type": "glc:AssignedTask", + "glc:hasDescription": "ReplicaSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:coredns-787d4945fb.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.1, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns-787d4945fb.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 73400320.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns-787d4945fb.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 178257920.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:coredns-787d4945fb" + } }, { "@id": "glc:default-scheduler", "@type": "glc:Scheduler", - "glc:assigns": "cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685", + "glc:assigns": "cluster:coredns-787d4945fb", "glc:hasID": "glc:default-scheduler" } ] diff --git a/app/k8s_transform/__fixture__/replicaset.turtle b/app/k8s_transform/__fixture__/replicaset.turtle index 39aba41..4d795f0 100644 --- a/app/k8s_transform/__fixture__/replicaset.turtle +++ b/app/k8s_transform/__fixture__/replicaset.turtle @@ -1,28 +1,28 @@ 
-cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 rdf:type glc:AssignedTask . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 glc:hasDescription "ReplicaSet" . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 glc:hasConstraint (cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity) . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 glc:hasID cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated rdf:type glc:SoftConstraint . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated glc:hasDescription "CPU.Allocated" . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated glc:maxValue 0.1^^ . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated glc:hasAspect glc:Performance . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated glc:hasID cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.CPU.Allocated glc:measuredIn glc:Core . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated rdf:type glc:SoftConstraint . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated glc:hasDescription "RAM.Allocated" . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated glc:maxValue 73400320.0^^ . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated glc:hasAspect glc:Performance . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated glc:hasID cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Allocated glc:measuredIn glc:Bytes . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity rdf:type glc:HardConstraint . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity glc:hasDescription "RAM.Capacity" . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity glc:maxValue 178257920.0^^ . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity glc:hasAspect glc:Performance . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity glc:hasID cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity . -cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685.RAM.Capacity glc:measuredIn glc:Bytes . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb rdf:type glc:AssignedTask . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb glc:hasDescription "Deployment" . -cluster:coredns.3c85dfea-a3a3-4cda-9145-c2fa46de66fb glc:hasSubTask cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 . +cluster:coredns rdf:type glc:AssignedTask . +cluster:coredns glc:hasDescription "Deployment" . +cluster:coredns glc:hasSubTask cluster:coredns-787d4945fb . +cluster:coredns-787d4945fb rdf:type glc:AssignedTask . +cluster:coredns-787d4945fb glc:hasDescription "ReplicaSet" . +cluster:coredns-787d4945fb glc:hasConstraint (cluster:coredns-787d4945fb.CPU.Allocated cluster:coredns-787d4945fb.RAM.Allocated cluster:coredns-787d4945fb.RAM.Capacity) . 
+cluster:coredns-787d4945fb glc:hasID cluster:coredns-787d4945fb . +cluster:coredns-787d4945fb.CPU.Allocated rdf:type glc:SoftConstraint . +cluster:coredns-787d4945fb.CPU.Allocated glc:hasDescription "CPU.Allocated" . +cluster:coredns-787d4945fb.CPU.Allocated glc:maxValue 0.1^^ . +cluster:coredns-787d4945fb.CPU.Allocated glc:hasAspect glc:Performance . +cluster:coredns-787d4945fb.CPU.Allocated glc:hasID cluster:coredns-787d4945fb.CPU.Allocated . +cluster:coredns-787d4945fb.CPU.Allocated glc:measuredIn glc:Core . +cluster:coredns-787d4945fb.RAM.Allocated rdf:type glc:SoftConstraint . +cluster:coredns-787d4945fb.RAM.Allocated glc:hasDescription "RAM.Allocated" . +cluster:coredns-787d4945fb.RAM.Allocated glc:maxValue 73400320.0^^ . +cluster:coredns-787d4945fb.RAM.Allocated glc:hasAspect glc:Performance . +cluster:coredns-787d4945fb.RAM.Allocated glc:hasID cluster:coredns-787d4945fb.RAM.Allocated . +cluster:coredns-787d4945fb.RAM.Allocated glc:measuredIn glc:Bytes . +cluster:coredns-787d4945fb.RAM.Capacity rdf:type glc:HardConstraint . +cluster:coredns-787d4945fb.RAM.Capacity glc:hasDescription "RAM.Capacity" . +cluster:coredns-787d4945fb.RAM.Capacity glc:maxValue 178257920.0^^ . +cluster:coredns-787d4945fb.RAM.Capacity glc:hasAspect glc:Performance . +cluster:coredns-787d4945fb.RAM.Capacity glc:hasID cluster:coredns-787d4945fb.RAM.Capacity . +cluster:coredns-787d4945fb.RAM.Capacity glc:measuredIn glc:Bytes . glc:Bytes rdf:type glc:MeasurementUnit . glc:Bytes glc:hasID glc:Bytes . glc:Core rdf:type glc:MeasurementUnit . @@ -30,5 +30,5 @@ glc:Core glc:hasID glc:Core . glc:Performance rdf:type glc:Aspect . glc:Performance glc:hasID glc:Performance . glc:default-scheduler rdf:type glc:Scheduler . -glc:default-scheduler glc:assigns cluster:coredns-787d4945fb.83c2b537-9bcd-4fac-b954-ea140c7d3685 . +glc:default-scheduler glc:assigns cluster:coredns-787d4945fb . glc:default-scheduler glc:hasID glc:default-scheduler . 
diff --git a/app/k8s_transform/__fixture__/statefulset.jsonld b/app/k8s_transform/__fixture__/statefulset.jsonld index f56dad6..8b1772f 100644 --- a/app/k8s_transform/__fixture__/statefulset.jsonld +++ b/app/k8s_transform/__fixture__/statefulset.jsonld @@ -22,43 +22,42 @@ "glc:hasID": "glc:Performance" }, { - "@id": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2", - "@type": "glc:AssignedTask", - "glc:hasDescription": "StatefulSet", - "glc:hasConstraint": { - "@set": [ - { - "@id": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated", - "@type": "glc:SoftConstraint", - "glc:hasDescription": "CPU.Allocated", - "glc:maxValue": 3.0, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated", - "glc:measuredIn": "glc:Core" - }, - { - "@id": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated", - "@type": "glc:SoftConstraint", - "glc:hasDescription": "RAM.Allocated", - "glc:maxValue": 7516192768.0, - "glc:hasAspect": "glc:Performance", - "glc:hasID": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated", - "glc:measuredIn": "glc:Bytes" - } - ] - }, - "glc:hasID": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2" - }, - { - "@id": "cluster:tenant1.930f2fc9-2b5e-46c7-a11f-4374fe55fafd", + "@id": "cluster:tenant1", "@type": "glc:AssignedTask", "glc:hasDescription": "Tenant", - "glc:makes": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2" + "glc:makes": { + "@id": "cluster:tenant1-pool-0", + "@type": "glc:AssignedTask", + "glc:hasDescription": "StatefulSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:tenant1-pool-0.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 3.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:tenant1-pool-0.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:tenant1-pool-0.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 7516192768.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:tenant1-pool-0.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:tenant1-pool-0" + } }, { "@id": "glc:default-scheduler", "@type": "glc:Scheduler", - "glc:assigns": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2", + "glc:assigns": "cluster:tenant1-pool-0", "glc:hasID": "glc:default-scheduler" } ] diff --git a/app/k8s_transform/__fixture__/statefulset.turtle b/app/k8s_transform/__fixture__/statefulset.turtle index 26f5d3a..f3da0e5 100644 --- a/app/k8s_transform/__fixture__/statefulset.turtle +++ b/app/k8s_transform/__fixture__/statefulset.turtle @@ -1,22 +1,22 @@ -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 rdf:type glc:AssignedTask . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 glc:hasDescription "StatefulSet" . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 glc:hasConstraint (cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated) . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 glc:hasID cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated rdf:type glc:SoftConstraint . 
-cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated glc:hasDescription "CPU.Allocated" . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated glc:maxValue 3.0^^ . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated glc:hasAspect glc:Performance . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated glc:hasID cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.CPU.Allocated glc:measuredIn glc:Core . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated rdf:type glc:SoftConstraint . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated glc:hasDescription "RAM.Allocated" . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated glc:maxValue 7516192768.0^^ . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated glc:hasAspect glc:Performance . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated glc:hasID cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated . -cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2.RAM.Allocated glc:measuredIn glc:Bytes . -cluster:tenant1.930f2fc9-2b5e-46c7-a11f-4374fe55fafd rdf:type glc:AssignedTask . -cluster:tenant1.930f2fc9-2b5e-46c7-a11f-4374fe55fafd glc:hasDescription "Tenant" . -cluster:tenant1.930f2fc9-2b5e-46c7-a11f-4374fe55fafd glc:makes cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 . +cluster:tenant1 rdf:type glc:AssignedTask . +cluster:tenant1 glc:hasDescription "Tenant" . +cluster:tenant1 glc:makes cluster:tenant1-pool-0 . +cluster:tenant1-pool-0 rdf:type glc:AssignedTask . +cluster:tenant1-pool-0 glc:hasDescription "StatefulSet" . +cluster:tenant1-pool-0 glc:hasConstraint (cluster:tenant1-pool-0.CPU.Allocated cluster:tenant1-pool-0.RAM.Allocated) . +cluster:tenant1-pool-0 glc:hasID cluster:tenant1-pool-0 . +cluster:tenant1-pool-0.CPU.Allocated rdf:type glc:SoftConstraint . +cluster:tenant1-pool-0.CPU.Allocated glc:hasDescription "CPU.Allocated" . +cluster:tenant1-pool-0.CPU.Allocated glc:maxValue 3.0^^ . +cluster:tenant1-pool-0.CPU.Allocated glc:hasAspect glc:Performance . +cluster:tenant1-pool-0.CPU.Allocated glc:hasID cluster:tenant1-pool-0.CPU.Allocated . +cluster:tenant1-pool-0.CPU.Allocated glc:measuredIn glc:Core . +cluster:tenant1-pool-0.RAM.Allocated rdf:type glc:SoftConstraint . +cluster:tenant1-pool-0.RAM.Allocated glc:hasDescription "RAM.Allocated" . +cluster:tenant1-pool-0.RAM.Allocated glc:maxValue 7516192768.0^^ . +cluster:tenant1-pool-0.RAM.Allocated glc:hasAspect glc:Performance . +cluster:tenant1-pool-0.RAM.Allocated glc:hasID cluster:tenant1-pool-0.RAM.Allocated . +cluster:tenant1-pool-0.RAM.Allocated glc:measuredIn glc:Bytes . glc:Bytes rdf:type glc:MeasurementUnit . glc:Bytes glc:hasID glc:Bytes . glc:Core rdf:type glc:MeasurementUnit . @@ -24,5 +24,5 @@ glc:Core glc:hasID glc:Core . glc:Performance rdf:type glc:Aspect . glc:Performance glc:hasID glc:Performance . glc:default-scheduler rdf:type glc:Scheduler . -glc:default-scheduler glc:assigns cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2 . +glc:default-scheduler glc:assigns cluster:tenant1-pool-0 . glc:default-scheduler glc:hasID glc:default-scheduler . 
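Note (illustrative, not part of the patch): the fixture updates above and the transformer_base.py change below follow one identifier scheme — workload tasks are now identified by the bare resource name instead of name.uid, and pod metrics are keyed by namespace.name. A minimal Python sketch of that derivation, assuming jsonpath-ng's parse and plain strings standing in for the project's IRI type:

# Illustrative sketch only; not part of the patch. Mirrors the ID scheme used
# by the updated fixtures and transformers: no metadata.uid suffix on task
# IRIs, and pod metric resource_ids of the form "<namespace>.<name>".
from typing import Any, Dict

from jsonpath_ng import parse

CLUSTER_PREFIX = "cluster"  # assumed stand-in for the cluster IRI prefix


def task_iri(resource: Dict[str, Any]) -> str:
    # Deployments, ReplicaSets, StatefulSets, etc. are identified by name
    # only, e.g. "cluster:coredns" rather than "cluster:coredns.<uid>".
    name = parse("$.metadata.name").find(resource)[0].value
    return f"{CLUSTER_PREFIX}:{name}"


def pod_metric_key(resource: Dict[str, Any]) -> str:
    # Matches get_pod_name(): metric resource_ids are "<namespace>.<name>".
    name = parse("$.metadata.name").find(resource)[0].value
    namespace = parse("$.metadata.namespace").find(resource)[0].value
    return f"{namespace}.{name}"


if __name__ == "__main__":
    pod = {"metadata": {"name": "coredns-787d4945fb-l85r5", "namespace": "kube-system"}}
    assert task_iri({"metadata": {"name": "coredns"}}) == "cluster:coredns"
    assert pod_metric_key(pod) == "kube-system.coredns-787d4945fb-l85r5"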
diff --git a/app/k8s_transform/transformer_base.py b/app/k8s_transform/transformer_base.py index d94ac08..6ccf2e0 100644 --- a/app/k8s_transform/transformer_base.py +++ b/app/k8s_transform/transformer_base.py @@ -51,10 +51,9 @@ def transform(self, context: TransformationContext) -> None: raise NotImplementedError def get_reference_id(self, reference: Dict[str, Any]) -> Tuple[IRI, str]: - name = reference.get("name") - uid = reference.get("uid") + name = reference["name"] resource_type = reference["kind"] - resource_id = IRI(self.CLUSTER_PREFIX, f"{name}.{uid}") + resource_id = IRI(self.CLUSTER_PREFIX, name) return resource_id, resource_type def write_tuple(self, name: IRI, property: IRI, query: str) -> None: @@ -113,8 +112,7 @@ def normalize(self, value: str) -> str: def get_id(self) -> IRI: name = parse("$.metadata.name").find(self.source)[0].value - uid = parse("$.metadata.uid").find(self.source)[0].value - resource_id = IRI(self.CLUSTER_PREFIX, f"{name}.{uid}") + resource_id = IRI(self.CLUSTER_PREFIX, name) return resource_id def get_pod_id(self) -> IRI: From f25aba44ae8f311fc0c4f2a2647bce56248856aa Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 14 Jun 2024 10:00:59 +0200 Subject: [PATCH 30/61] HHT-669: KG Exporter Context tests --- .../mock_metadata_service_client.py | 59 +- .../slice_glaciation-test-master01_80.jsonld | 670 ++++++++++++++++++ .../slice_glaciation-test-worker01_80.jsonld | 577 +++++++++++++++ app/core/kg_updater.py | 2 +- app/core/test_kg_repository.py | 2 +- app/core/test_kg_updater.py | 2 +- app/core/test_snapshot_base.py | 12 +- app/core/types.py | 6 + app/kgexporter_context.py | 2 +- app/kgexporter_settings.py | 5 + app/test_kgexporter_context.py | 82 ++- app/test_pydantic_yaml.py | 3 +- 12 files changed, 1393 insertions(+), 29 deletions(-) create mode 100644 app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld create mode 100644 app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld diff --git a/app/clients/metadata_service/mock_metadata_service_client.py b/app/clients/metadata_service/mock_metadata_service_client.py index aa4b45b..2094b2a 100644 --- a/app/clients/metadata_service/mock_metadata_service_client.py +++ b/app/clients/metadata_service/mock_metadata_service_client.py @@ -1,4 +1,7 @@ -from typing import Dict, List, Optional, TypeAlias +from typing import Dict, List, Optional, Tuple, TypeAlias + +import asyncio +import datetime from app.clients.metadata_service.metadata_service_client import ( MetadataServiceClient, @@ -27,8 +30,10 @@ def get_query_result(self, sparql: SparQLQuery) -> Optional[List[Triple]]: def add_insert(self, result: SerializedGraph) -> None: self.inserts.append(result) - def get_inserts(self) -> List[SerializedGraph]: - return self.inserts + def take_inserts(self) -> List[SerializedGraph]: + inserts = self.inserts + self.inserts = [] + return inserts class MockMetadataServiceClient(MetadataServiceClient): @@ -38,11 +43,11 @@ def __init__(self): self.hosts = dict() def mock_query( - self, host: HostId, sparql: SparQLQuery, result: List[Triple] + self, host_id: HostId, sparql: SparQLQuery, result: List[Triple] ) -> None: - if host not in self.hosts: - self.hosts[host] = HostInteractions() - self.hosts[host].add_query(sparql, result) + if host_id not in self.hosts: + self.hosts[host_id] = HostInteractions() + self.hosts[host_id].add_query(sparql, result) async def query(self, host_and_port: HostId, sparql: SparQLQuery) -> List[Triple]: host_queries = 
self.hosts.get(host_and_port) @@ -50,14 +55,48 @@ async def query(self, host_and_port: HostId, sparql: SparQLQuery) -> List[Triple return host_queries.get_query_result(sparql) or [] return [] - def get_inserts(self, host: HostId) -> List[SerializedGraph]: - host_interactions = self.hosts.get(host) + def take_inserts(self, host_id: HostId) -> List[SerializedGraph]: + host_interactions = self.hosts.get(host_id) if host_interactions: - return host_interactions.get_inserts() + return host_interactions.take_inserts() else: return [] + def take_all_inserts(self) -> List[Tuple[HostId, SerializedGraph]]: + result = [] + for host_id, host_interaction in self.hosts.items(): + inserts = [(host_id, insert) for insert in host_interaction.take_inserts()] + result.extend(inserts) + return result + async def insert(self, host_and_port: HostId, message: SerializedGraph) -> None: if host_and_port not in self.hosts: self.hosts[host_and_port] = HostInteractions() self.hosts[host_and_port].add_insert(message) + + def wait_for_inserts( + self, seconds: int, count: int + ) -> List[Tuple[HostId, SerializedGraph]]: + start = datetime.datetime.now() + result = [] + while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): + graphs = self.take_all_inserts() + print(len(graphs)) + result.extend(graphs) + if len(graphs) == count: + return graphs + asyncio.run(asyncio.sleep(0.5)) + raise AssertionError("time is up.") + + def wait_for_inserts2( + self, runner: asyncio.Runner, seconds: int, count: int + ) -> List[Tuple[HostId, SerializedGraph]]: + start = datetime.datetime.now() + result = [] + while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): + graphs = self.take_all_inserts() + result.extend(graphs) + if len(graphs) == count: + return graphs + runner.run(asyncio.sleep(0.5)) + raise AssertionError("time is up.") diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld new file mode 100644 index 0000000..1fd8e5e --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld @@ -0,0 +1,670 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "cluster:glaciation-test-master01", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-test-master01", + "glc:hasMeasurement": { + "@id": "cluster:glaciation-test-master01.Energy.Index", + "@type": "glc:Measurement", + "glc:hasDescription": "Energy.Index", + "glc:hasTimestamp": 1, + "glc:hasValue": 1001, + "glc:hasID": "cluster:glaciation-test-master01.Energy.Index", + "glc:measuredIn": "glc:Milliwatt", + "glc:relatesToMeasurementProperty": "glc:Energy.Index" + }, + "glc:hasStatus": { + "@id": "cluster:glaciation-test-master01.Status", + "@type": "glc:Status", + "glc:hasDescription": "Ready", + "glc:startTime": "2024-02-13T13:53:43Z", + "glc:hasID": "cluster:glaciation-test-master01.Status" + }, + "glc:hasSubResource": { + "@set": [ + "cluster:glaciation-test-master01.CPU", + "cluster:glaciation-test-master01.GPU", + "cluster:glaciation-test-master01.Network", + "cluster:glaciation-test-master01.RAM", + "cluster:glaciation-test-master01.Storage" + ] + } + }, + { + "@id": 
"cluster:glaciation-test-master01.CPU", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "CPU", + "glc:hasID": "cluster:glaciation-test-master01.CPU", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-master01.CPU.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity CPU", + "glc:hasTimestamp": 1, + "glc:hasValue": 4, + "glc:hasID": "cluster:glaciation-test-master01.CPU.Capacity", + "glc:measuredIn": "glc:Core", + "glc:relatesToMeasurementProperty": "glc:CPU.Capacity" + }, + { + "@id": "cluster:glaciation-test-master01.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 41.0, + "glc:hasID": "cluster:glaciation-test-master01.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + } + ] + } + }, + { + "@id": "cluster:glaciation-test-master01.GPU", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "GPU", + "glc:hasID": "cluster:glaciation-test-master01.GPU", + "glc:hasMeasurement": { + "@id": "cluster:glaciation-test-master01.GPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 41.0, + "glc:hasID": "cluster:glaciation-test-master01.GPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:GPU.Usage" + } + }, + { + "@id": "cluster:glaciation-test-master01.Network", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Network", + "glc:hasID": "cluster:glaciation-test-master01.Network" + }, + { + "@id": "cluster:glaciation-test-master01.RAM", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "RAM", + "glc:hasID": "cluster:glaciation-test-master01.RAM", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-master01.RAM.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity", + "glc:hasTimestamp": 1, + "glc:hasValue": 16673939456, + "glc:hasID": "cluster:glaciation-test-master01.RAM.Capacity", + "glc:measuredIn": "glc:Bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Capacity" + }, + { + "@id": "cluster:glaciation-test-master01.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:glaciation-test-master01.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + } + ] + } + }, + { + "@id": "cluster:glaciation-test-master01.Storage", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "EphemeralStorage", + "glc:hasID": "cluster:glaciation-test-master01.Storage", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-master01.Storage.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity", + "glc:hasTimestamp": 1, + "glc:hasValue": 47266578354, + "glc:hasID": "cluster:glaciation-test-master01.Storage.Capacity", + "glc:measuredIn": "glc:Bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Capacity" + }, + { + "@id": "cluster:glaciation-test-master01.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:glaciation-test-master01.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + } + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5", + "@type": 
"glc:WorkProducingResource", + "glc:hasDescription": "Pod", + "k8s:hasName": "coredns-787d4945fb-l85r5", + "glc:consumes": "cluster:glaciation-test-master01", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 41.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.Network.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 44.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.Network.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + }, + "glc:hasSubResource": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://0ce09d1e8fdff70a58902bb3e73efafa76035dddbe9cec8b4115ac80d09f9963", + "k8s:hasContainerName": "coredns", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns" + }, + { + "@id": "cluster:kubernetes", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesCluster", + "glc:hasID": "cluster:kubernetes", + "glc:hasSubResource": "cluster:glaciation-test-master01" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Pod", + "k8s:hasName": "glaciation-pool-0-0", + "glc:consumes": "cluster:glaciation-test-master01", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 71.0, + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.Network.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 74.0, + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.Network.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + 
"glc:hasTimestamp": 17100500, + "glc:hasValue": 72.0, + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 73.0, + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + }, + "glc:hasSubResource": { + "@set": [ + "cluster:minio-tenant.glaciation-pool-0-0.minio", + "cluster:minio-tenant.glaciation-pool-0-0.sidecar", + "cluster:minio-tenant.glaciation-pool-0-0.validate-arguments" + ] + } + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.minio", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://cd294ba419c33b64d33b24583c0f64ef320560942872cf206750d69a11db7ba6", + "k8s:hasContainerName": "minio", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.minio" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.sidecar", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://68ecab8b10b91093bfd72f40c93b9b1545f9b5963e5a4b9b0bcea887be36f7fc", + "k8s:hasContainerName": "sidecar", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.sidecar" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.validate-arguments", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://1b107c003c626739a964d4489f8aa7c8eeba5c9df22f7779ca7bd0ea6957ad6d", + "k8s:hasContainerName": "validate-arguments", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.validate-arguments" + }, + { + "@id": "glc:Bytes", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Bytes" + }, + { + "@id": "glc:CPU.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Capacity" + }, + { + "@id": "glc:CPU.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Usage" + }, + { + "@id": "glc:Core", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Core" + }, + { + "@id": "glc:Energy.Index", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Energy.Index" + }, + { + "@id": "glc:GPU.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:GPU.Usage" + }, + { + "@id": "glc:Milliwatt", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Milliwatt" + }, + { + "@id": "glc:Network.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Network.Usage" + }, + { + "@id": "glc:Performance", + "@type": "glc:Aspect", + "glc:hasID": "glc:Performance" + }, + { + "@id": "glc:Power", + "@type": "glc:Aspect", + "glc:hasID": "glc:Power" + }, + { + "@id": "glc:RAM.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:RAM.Capacity" + }, + { + "@id": "glc:RAM.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:RAM.Usage" + }, + { + "@id": "glc:ResourceSpecification", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "ResourceSpecification", + "glc:hasID": "glc:ResourceSpecification", + "glc:makes": { + "@set": [ + "cluster:glaciation-test-master01.CPU.Capacity", + "cluster:glaciation-test-master01.Energy.Index", + "cluster:glaciation-test-master01.RAM.Capacity", + "cluster:glaciation-test-master01.Storage.Capacity" + ] + } + }, + { + "@id": 
"glc:Storage.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Storage.Capacity" + }, + { + "@id": "glc:Storage.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Storage.Usage" + }, + { + "@id": "glc:bytes", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:bytes" + }, + { + "@id": "glc:cAdvisor", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", + "glc:hasID": "glc:cAdvisor", + "glc:makes": { + "@set": [ + "cluster:glaciation-test-master01.CPU.Usage", + "cluster:glaciation-test-master01.RAM.Usage", + "cluster:glaciation-test-master01.Storage.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.CPU.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.Network.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.RAM.Usage", + "cluster:kube-system.coredns-787d4945fb-l85r5.Storage.Usage", + "cluster:minio-tenant.glaciation-pool-0-0.CPU.Usage", + "cluster:minio-tenant.glaciation-pool-0-0.Network.Usage", + "cluster:minio-tenant.glaciation-pool-0-0.RAM.Usage", + "cluster:minio-tenant.glaciation-pool-0-0.Storage.Usage" + ] + } + }, + { + "@id": "glc:coreseconds", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:coreseconds" + }, + { + "@id": "glc:dcgm", + "@type": "glc:MeasuringResource", + "glc:hasID": "glc:dcgm", + "glc:makes": "cluster:glaciation-test-master01.GPU.Usage" + }, + { + "@id": "cluster:coredns", + "@type": "glc:AssignedTask", + "glc:hasDescription": "Deployment", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:coredns.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.30000000000000004, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns.Energy.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "Energy.Allocated", + "glc:maxValue": 100.0, + "glc:hasAspect": "glc:Power", + "glc:hasID": "cluster:coredns.Energy.Allocated", + "glc:measuredIn": "glc:Milliwatt" + }, + { + "@id": "cluster:coredns.Energy.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "Energy.Capacity", + "glc:maxValue": 100.0, + "glc:hasAspect": "glc:Power", + "glc:hasID": "cluster:coredns.Energy.Capacity", + "glc:measuredIn": "glc:Milliwatt" + }, + { + "@id": "cluster:coredns.GPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "GPU.Allocated", + "glc:maxValue": 101.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.GPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns.GPU.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "GPU.Capacity", + "glc:maxValue": 101.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.GPU.Capacity", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns.Network.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "Network.Allocated", + "glc:maxValue": 1010.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.Network.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.Network.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "Network.Capacity", + "glc:maxValue": 1010.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.Network.Capacity", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.RAM.Allocated", + 
"@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 220200960.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 534773760.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns.Storage.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "Storage.Allocated", + "glc:maxValue": 100500.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns.Storage.Allocated", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:coredns", + "glc:hasSubTask": { + "@id": "cluster:coredns-787d4945fb", + "@type": "glc:AssignedTask", + "glc:hasDescription": "ReplicaSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:coredns-787d4945fb.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.1, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:coredns-787d4945fb.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 73400320.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:coredns-787d4945fb.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 178257920.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:coredns-787d4945fb.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:coredns-787d4945fb", + "glc:makes": "cluster:kube-system.coredns-787d4945fb-l85r5" + } + }, + { + "@id": "cluster:crd-resource", + "@type": "glc:AssignedTask", + "glc:hasDescription": "CRD", + "glc:makes": "cluster:coredns" + }, + { + "@id": "cluster:glaciation-pool-0", + "@type": "glc:AssignedTask", + "glc:hasDescription": "StatefulSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:glaciation-pool-0.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 3.5, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:glaciation-pool-0.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:glaciation-pool-0.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 8589934592.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:glaciation-pool-0.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:glaciation-pool-0", + "glc:makes": "cluster:minio-tenant.glaciation-pool-0-0" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.Status", + "@type": "glc:Status", + "glc:hasDescription": "Running", + "glc:startTime": "2024-02-13T13:53:43Z", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.Status" + }, + { + "@id": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns.Status", + "@type": "glc:Status", + "glc:hasDescription": "running", + "glc:startTime": "2024-02-13T13:53:44Z", + "glc:hasID": "cluster:kube-system.coredns-787d4945fb-l85r5.coredns.Status" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.Status", + "@type": 
"glc:Status", + "glc:hasDescription": "Running", + "glc:startTime": "2024-05-17T11:53:14Z", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.Status" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.minio.Status", + "@type": "glc:Status", + "glc:hasDescription": "running", + "glc:startTime": "2024-05-17T11:55:54Z", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.minio.Status" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.sidecar.Status", + "@type": "glc:Status", + "glc:hasDescription": "running", + "glc:startTime": "2024-05-17T11:54:19Z", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.sidecar.Status" + }, + { + "@id": "cluster:minio-tenant.glaciation-pool-0-0.validate-arguments.Status", + "@type": "glc:Status", + "glc:endTime": "2024-05-17T11:54:19Z", + "glc:hasDescription": "terminated", + "glc:startTime": "2024-05-17T11:54:19Z", + "glc:hasID": "cluster:minio-tenant.glaciation-pool-0-0.validate-arguments.Status" + }, + { + "@id": "cluster:tenant1", + "@type": "glc:AssignedTask", + "glc:hasDescription": "Tenant", + "glc:makes": "cluster:glaciation-pool-0" + }, + { + "@id": "glc:default-scheduler", + "@type": "glc:Scheduler", + "glc:assigns": { + "@set": [ + "cluster:coredns", + "cluster:coredns-787d4945fb", + "cluster:glaciation-pool-0" + ] + }, + "glc:hasID": "glc:default-scheduler", + "glc:manages": { + "@set": [ + "cluster:kube-system.coredns-787d4945fb-l85r5", + "cluster:kube-system.coredns-787d4945fb-l85r5.coredns", + "cluster:minio-tenant.glaciation-pool-0-0", + "cluster:minio-tenant.glaciation-pool-0-0.minio", + "cluster:minio-tenant.glaciation-pool-0-0.sidecar", + "cluster:minio-tenant.glaciation-pool-0-0.validate-arguments" + ] + } + } + ] +} diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld new file mode 100644 index 0000000..9c7c95a --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld @@ -0,0 +1,577 @@ +{ + "@context": { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + }, + "@graph": [ + { + "@id": "cluster:glaciation-test-worker01", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-test-worker01", + "glc:hasMeasurement": { + "@id": "cluster:glaciation-test-worker01.Energy.Index", + "@type": "glc:Measurement", + "glc:hasDescription": "Energy.Index", + "glc:hasTimestamp": 1, + "glc:hasValue": 1001, + "glc:hasID": "cluster:glaciation-test-worker01.Energy.Index", + "glc:measuredIn": "glc:Milliwatt", + "glc:relatesToMeasurementProperty": "glc:Energy.Index" + }, + "glc:hasStatus": { + "@id": "cluster:glaciation-test-worker01.Status", + "@type": "glc:Status", + "glc:hasDescription": "Ready", + "glc:startTime": "2024-02-23T06:11:02Z", + "glc:hasID": "cluster:glaciation-test-worker01.Status" + }, + "glc:hasSubResource": { + "@set": [ + "cluster:glaciation-test-worker01.CPU", + "cluster:glaciation-test-worker01.GPU", + "cluster:glaciation-test-worker01.Network", + "cluster:glaciation-test-worker01.RAM", + "cluster:glaciation-test-worker01.Storage" + ] + } + }, + { + "@id": "cluster:glaciation-test-worker01.CPU", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "CPU", + "glc:hasID": 
"cluster:glaciation-test-worker01.CPU", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-worker01.CPU.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity CPU", + "glc:hasTimestamp": 1, + "glc:hasValue": 4, + "glc:hasID": "cluster:glaciation-test-worker01.CPU.Capacity", + "glc:measuredIn": "glc:Core", + "glc:relatesToMeasurementProperty": "glc:CPU.Capacity" + }, + { + "@id": "cluster:glaciation-test-worker01.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 41.0, + "glc:hasID": "cluster:glaciation-test-worker01.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + } + ] + } + }, + { + "@id": "cluster:glaciation-test-worker01.GPU", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "GPU", + "glc:hasID": "cluster:glaciation-test-worker01.GPU" + }, + { + "@id": "cluster:glaciation-test-worker01.Network", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Network", + "glc:hasID": "cluster:glaciation-test-worker01.Network", + "glc:hasMeasurement": { + "@id": "cluster:glaciation-test-worker01.Network.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:glaciation-test-worker01.Network.Usage", + "glc:measuredIn": { + "@set": [ + "glc:bytes5", + "glc:bytes9" + ] + }, + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + } + }, + { + "@id": "cluster:glaciation-test-worker01.RAM", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "RAM", + "glc:hasID": "cluster:glaciation-test-worker01.RAM", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-worker01.RAM.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity", + "glc:hasTimestamp": 1, + "glc:hasValue": 16673947648, + "glc:hasID": "cluster:glaciation-test-worker01.RAM.Capacity", + "glc:measuredIn": "glc:Bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Capacity" + }, + { + "@id": "cluster:glaciation-test-worker01.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 42.0, + "glc:hasID": "cluster:glaciation-test-worker01.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + } + ] + } + }, + { + "@id": "cluster:glaciation-test-worker01.Storage", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "EphemeralStorage", + "glc:hasID": "cluster:glaciation-test-worker01.Storage", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:glaciation-test-worker01.Storage.Capacity", + "@type": "glc:Measurement", + "glc:hasDescription": "Capacity", + "glc:hasTimestamp": 1, + "glc:hasValue": 47266578354, + "glc:hasID": "cluster:glaciation-test-worker01.Storage.Capacity", + "glc:measuredIn": "glc:Bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Capacity" + }, + { + "@id": "cluster:glaciation-test-worker01.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 43.0, + "glc:hasID": "cluster:glaciation-test-worker01.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + } + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Pod", + "k8s:hasName": 
"kube-flannel-ds-848v8", + "glc:consumes": "cluster:glaciation-test-worker01", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8", + "glc:hasMeasurement": { + "@set": [ + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 51.0, + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.Network.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 54.0, + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.Network.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 52.0, + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 53.0, + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + }, + "glc:hasSubResource": { + "@set": [ + "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni", + "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni-plugin", + "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel" + ] + } + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://b3e5761de29cd8e7a2612a59c48e49882907acca959c9dc54721a43d86c4c69f", + "k8s:hasContainerName": "install-cni", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni-plugin", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://c45d8a7c2b94d7bfd07f9343366b27b7bdced1cb6b07c0f36d4e813bc7f48a74", + "k8s:hasContainerName": "install-cni-plugin", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni-plugin" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://b23fcc9919d7e58fdeb4a729ccf2363c31429e9906a836f55e7d3bf61c908cdb", + "k8s:hasContainerName": "kube-flannel", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel" + }, + { + "@id": "cluster:kubernetes", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesCluster", + "glc:hasID": "cluster:kubernetes", + "glc:hasSubResource": "cluster:glaciation-test-worker01" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Pod", + "k8s:hasName": "init-vault-cluster-cbqhq", + "glc:consumes": "cluster:glaciation-test-worker01", + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq", + "glc:hasMeasurement": { + "@set": [ + { + "@id": 
"cluster:vault.init-vault-cluster-cbqhq.CPU.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 61.0, + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.CPU.Usage", + "glc:measuredIn": "glc:coreseconds", + "glc:relatesToMeasurementProperty": "glc:CPU.Usage" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq.Network.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 64.0, + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.Network.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Network.Usage" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq.RAM.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 62.0, + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.RAM.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:RAM.Usage" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq.Storage.Usage", + "@type": "glc:Measurement", + "glc:hasDescription": "Usage", + "glc:hasTimestamp": 17100500, + "glc:hasValue": 63.0, + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.Storage.Usage", + "glc:measuredIn": "glc:bytes", + "glc:relatesToMeasurementProperty": "glc:Storage.Usage" + } + ] + }, + "glc:hasSubResource": "cluster:vault.init-vault-cluster-cbqhq.vault" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq.vault", + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "Container", + "k8s:hasContainerID": "containerd://8d6b55a900568fce8346a5fffb66e7d5dca809f676a15fb6d0f54feb8399ea39", + "k8s:hasContainerName": "vault", + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.vault" + }, + { + "@id": "glc:Bytes", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Bytes" + }, + { + "@id": "glc:CPU.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Capacity" + }, + { + "@id": "glc:CPU.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:CPU.Usage" + }, + { + "@id": "glc:Core", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Core" + }, + { + "@id": "glc:Energy.Index", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Energy.Index" + }, + { + "@id": "glc:Milliwatt", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:Milliwatt" + }, + { + "@id": "glc:Network.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Network.Usage" + }, + { + "@id": "glc:Performance", + "@type": "glc:Aspect", + "glc:hasID": "glc:Performance" + }, + { + "@id": "glc:RAM.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:RAM.Capacity" + }, + { + "@id": "glc:RAM.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:RAM.Usage" + }, + { + "@id": "glc:ResourceSpecification", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "ResourceSpecification", + "glc:hasID": "glc:ResourceSpecification", + "glc:makes": { + "@set": [ + "cluster:glaciation-test-worker01.CPU.Capacity", + "cluster:glaciation-test-worker01.Energy.Index", + "cluster:glaciation-test-worker01.RAM.Capacity", + "cluster:glaciation-test-worker01.Storage.Capacity" + ] + } + }, + { + "@id": "glc:Storage.Capacity", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Storage.Capacity" + }, + { + "@id": "glc:Storage.Usage", + "@type": "glc:MeasurementProperty", + "glc:hasID": "glc:Storage.Usage" + }, + { + "@id": "glc:bytes", + "@type": "glc:MeasurementUnit", 
+ "glc:hasID": "glc:bytes" + }, + { + "@id": "glc:bytes5", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:bytes5" + }, + { + "@id": "glc:bytes9", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:bytes9" + }, + { + "@id": "glc:cAdvisor", + "@type": "glc:MeasuringResource", + "glc:hasDescription": "cAdvisor metrics https://github.com/google/cadvisor/blob/master/docs/storage/prometheus.md", + "glc:hasID": "glc:cAdvisor", + "glc:makes": { + "@set": [ + "cluster:glaciation-test-worker01.CPU.Usage", + "cluster:glaciation-test-worker01.Network.Usage", + "cluster:glaciation-test-worker01.RAM.Usage", + "cluster:glaciation-test-worker01.Storage.Usage", + "cluster:kube-flannel.kube-flannel-ds-848v8.CPU.Usage", + "cluster:kube-flannel.kube-flannel-ds-848v8.Network.Usage", + "cluster:kube-flannel.kube-flannel-ds-848v8.RAM.Usage", + "cluster:kube-flannel.kube-flannel-ds-848v8.Storage.Usage", + "cluster:vault.init-vault-cluster-cbqhq.CPU.Usage", + "cluster:vault.init-vault-cluster-cbqhq.Network.Usage", + "cluster:vault.init-vault-cluster-cbqhq.RAM.Usage", + "cluster:vault.init-vault-cluster-cbqhq.Storage.Usage" + ] + } + }, + { + "@id": "glc:coreseconds", + "@type": "glc:MeasurementUnit", + "glc:hasID": "glc:coreseconds" + }, + { + "@id": "cluster:init-vault-cluster", + "@type": "glc:AssignedTask", + "glc:hasDescription": "Job", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:init-vault-cluster.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.1, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:init-vault-cluster.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:init-vault-cluster.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 104857600.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:init-vault-cluster.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:init-vault-cluster.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 209715200.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:init-vault-cluster.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:init-vault-cluster", + "glc:makes": "cluster:vault.init-vault-cluster-cbqhq" + }, + { + "@id": "cluster:kube-flannel-ds", + "@type": "glc:AssignedTask", + "glc:hasDescription": "DaemonSet", + "glc:hasConstraint": { + "@set": [ + { + "@id": "cluster:kube-flannel-ds.CPU.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "CPU.Allocated", + "glc:maxValue": 0.15000000000000002, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:kube-flannel-ds.CPU.Allocated", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:kube-flannel-ds.CPU.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "CPU.Capacity", + "glc:maxValue": 0.6000000000000001, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:kube-flannel-ds.CPU.Capacity", + "glc:measuredIn": "glc:Core" + }, + { + "@id": "cluster:kube-flannel-ds.RAM.Allocated", + "@type": "glc:SoftConstraint", + "glc:hasDescription": "RAM.Allocated", + "glc:maxValue": 314572800.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:kube-flannel-ds.RAM.Allocated", + "glc:measuredIn": "glc:Bytes" + }, + { + "@id": "cluster:kube-flannel-ds.RAM.Capacity", + "@type": "glc:HardConstraint", + "glc:hasDescription": "RAM.Capacity", + "glc:maxValue": 
1572864000.0, + "glc:hasAspect": "glc:Performance", + "glc:hasID": "cluster:kube-flannel-ds.RAM.Capacity", + "glc:measuredIn": "glc:Bytes" + } + ] + }, + "glc:hasID": "cluster:kube-flannel-ds", + "glc:makes": "cluster:kube-flannel.kube-flannel-ds-848v8" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.Status", + "@type": "glc:Status", + "glc:hasDescription": "Running", + "glc:startTime": "2023-10-20T11:01:50Z", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.Status" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni-plugin.Status", + "@type": "glc:Status", + "glc:endTime": "2023-12-11T11:10:14Z", + "glc:hasDescription": "terminated", + "glc:startTime": "2023-12-11T11:10:14Z", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni-plugin.Status" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni.Status", + "@type": "glc:Status", + "glc:endTime": "2023-12-11T11:10:15Z", + "glc:hasDescription": "terminated", + "glc:startTime": "2023-12-11T11:10:15Z", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni.Status" + }, + { + "@id": "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel.Status", + "@type": "glc:Status", + "glc:hasDescription": "running", + "glc:startTime": "2023-12-11T11:10:16Z", + "glc:hasID": "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel.Status" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq.Status", + "@type": "glc:Status", + "glc:hasDescription": "Succeeded", + "glc:startTime": "2024-05-17T11:37:57Z", + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.Status" + }, + { + "@id": "cluster:vault.init-vault-cluster-cbqhq.vault.Status", + "@type": "glc:Status", + "glc:endTime": "2024-05-17T11:39:02Z", + "glc:hasDescription": "terminated", + "glc:startTime": "2024-05-17T11:38:09Z", + "glc:hasID": "cluster:vault.init-vault-cluster-cbqhq.vault.Status" + }, + { + "@id": "glc:default-scheduler", + "@type": "glc:Scheduler", + "glc:assigns": { + "@set": [ + "cluster:init-vault-cluster", + "cluster:kube-flannel-ds" + ] + }, + "glc:hasID": "glc:default-scheduler", + "glc:manages": { + "@set": [ + "cluster:kube-flannel.kube-flannel-ds-848v8", + "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni", + "cluster:kube-flannel.kube-flannel-ds-848v8.install-cni-plugin", + "cluster:kube-flannel.kube-flannel-ds-848v8.kube-flannel", + "cluster:vault.init-vault-cluster-cbqhq", + "cluster:vault.init-vault-cluster-cbqhq.vault" + ] + } + } + ] +} diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index 6938bd5..4aea6d7 100644 --- a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -33,4 +33,4 @@ async def run(self) -> None: ) await self.kg_repository.update(slice.slice_id, slice.graph) else: - await asyncio.sleep(0.3) + await asyncio.sleep(0.5) diff --git a/app/core/test_kg_repository.py b/app/core/test_kg_repository.py index 61c478d..fcb6370 100644 --- a/app/core/test_kg_repository.py +++ b/app/core/test_kg_repository.py @@ -20,7 +20,7 @@ def test_update(self) -> None: graph, expected = self.simple_node() asyncio.run(repository.update(slice_id, graph)) - graphs = client.get_inserts(slice_id.get_host_port()) + graphs = client.take_inserts(slice_id.get_host_port()) self.assertEqual(expected, graphs[0]) def test_query(self) -> None: diff --git a/app/core/test_kg_updater.py b/app/core/test_kg_updater.py index 0d98bb2..546ce14 100644 --- a/app/core/test_kg_updater.py +++ b/app/core/test_kg_updater.py @@ -43,7 +43,7 @@ def test_kg_updater(self) -> None: def 
wait_for_graph(self, slice_id: KGSliceId, seconds: int) -> SerializedGraph: start = datetime.datetime.now() while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): - graphs = self.client.get_inserts(slice_id.get_host_port()) + graphs = self.client.take_inserts(slice_id.get_host_port()) if len(graphs) > 0: return graphs[0] self.runner.run(asyncio.sleep(0.1)) diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 7bd7df3..44a98a7 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -119,10 +119,18 @@ def assert_graph(self, graph: Graph, snapshot_id: str, slice_id: KGSliceId) -> N node_jsonld = self.load_jsonld(file_path) buffer = StringIO() - JsonLDSerialializer(self.get_jsonld_config()).write(buffer, graph) + JsonLDSerialializer(self.get_test_jsonld_config()).write(buffer, graph) self.assertEqual(json.loads(buffer.getvalue()), node_jsonld) # type: ignore - def get_jsonld_config(self) -> JsonLDConfiguration: + def assert_serialized_graph( + self, snapshot_id: str, slice_id: KGSliceId, actual_graph: str + ) -> None: + file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/slice_{slice_id.node_ip}_{slice_id.port}.jsonld" + node_jsonld = self.load_jsonld(file_path) + + self.assertEqual(json.loads(actual_graph), node_jsonld) # type: ignore + + def get_test_jsonld_config(self) -> JsonLDConfiguration: contexts: Dict[IdBase, Dict[str, Any]] = { JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { "k8s": "http://glaciation-project.eu/model/k8s/", diff --git a/app/core/types.py b/app/core/types.py index 631ea16..727d71f 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -1,5 +1,6 @@ from typing import List, Set, Tuple +import urllib.parse from dataclasses import dataclass, field from app.clients.k8s.k8s_client import ResourceSnapshot @@ -16,6 +17,11 @@ class KGSliceId: def get_host_port(self) -> str: return f"{self.node_ip}:{self.port}" + @staticmethod + def from_host_port(host_and_port: str) -> "KGSliceId": + result = urllib.parse.urlsplit(f"//{host_and_port}") + return KGSliceId(str(result.hostname), result.port or 80) + @dataclass class DKGSlice: diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 3855249..12b2cbd 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -61,7 +61,7 @@ def start(self) -> None: return self.running.set() self.runner.run(self.run_tasks()) - server, _ = start_http_server(8080) + server, _ = start_http_server(self.settings.prometheus.endpoint_port) self.prometheus_server = server async def run_tasks(self) -> None: diff --git a/app/kgexporter_settings.py b/app/kgexporter_settings.py index fd70ddf..69ceb87 100644 --- a/app/kgexporter_settings.py +++ b/app/kgexporter_settings.py @@ -8,8 +8,13 @@ from app.core.kg_builder import KGBuilderSettings +class PrometheusSettings(BaseSettings): + endpoint_port: int = 8080 + + class KGExporterSettings(BaseSettings): builder: KGBuilderSettings k8s: K8SSettings influxdb: InfluxDBSettings metadata: MetadataServiceSettings + prometheus: PrometheusSettings diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 98dd6db..d9497f7 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -1,3 +1,5 @@ +from typing import List, Tuple + from unittest import TestCase from app.clients.influxdb.influxdb_settings import InfluxDBSettings @@ -9,19 +11,78 @@ ) from app.clients.metadata_service.mock_metadata_service_client import ( MockMetadataServiceClient, + SerializedGraph, ) from 
app.core.kg_builder import KGBuilderSettings, QuerySettings +from app.core.test_snapshot_base import SnapshotTestBase +from app.core.types import KGSliceId from app.kgexporter_context import KGExporterContext -from app.kgexporter_settings import KGExporterSettings +from app.kgexporter_settings import KGExporterSettings, PrometheusSettings from app.serialize.jsonld_configuration import JsonLDConfiguration -class KGExporterContextTest(TestCase): - def test_start(self): - metadata_client = MockMetadataServiceClient() - k8s_client = MockK8SClient() - influxdb_client = MockInfluxDBClient() - jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) +class KGExporterContextTest(TestCase, SnapshotTestBase): + metadata_client: MockMetadataServiceClient + k8s_client: MockK8SClient + influxdb_client: MockInfluxDBClient + jsonld_config: JsonLDConfiguration + settings: KGExporterSettings + context: KGExporterContext + + def setUp(self) -> None: + self.metadata_client = MockMetadataServiceClient() + self.k8s_client = MockK8SClient() + self.influxdb_client = MockInfluxDBClient() + self.jsonld_config = self.get_test_jsonld_config() + self.settings = self.test_kg_exporter_settings() + self.context = KGExporterContext( + self.metadata_client, + self.k8s_client, + self.influxdb_client, + self.jsonld_config, + self.settings, + ) + + def test_end_to_end_minimal(self) -> None: + self.mock_inputs( + "minimal", + self.k8s_client, + self.influxdb_client, + self.settings.builder.queries, + ) + self.context.start() + + inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 2, 1) + + self.assert_graphs("minimal", inserts) + + self.context.stop() + + def test_end_to_end_multinode(self) -> None: + self.settings.prometheus.endpoint_port = 8081 + self.mock_inputs( + "multinode", + self.k8s_client, + self.influxdb_client, + self.settings.builder.queries, + ) + self.context.start() + + inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 2, 2) + + self.assert_graphs("multinode", inserts) + + self.context.stop() + + def assert_graphs( + self, snapshot_identity: str, inserts: List[Tuple[str, SerializedGraph]] + ) -> None: + for insert in inserts: + host_and_port, serialized_graph = insert + slice_id = KGSliceId.from_host_port(host_and_port) + self.assert_serialized_graph(snapshot_identity, slice_id, serialized_graph) + + def test_kg_exporter_settings(self) -> KGExporterSettings: settings = KGExporterSettings( builder=KGBuilderSettings( builder_tick_seconds=1, node_port=80, queries=QuerySettings() @@ -31,9 +92,6 @@ def test_start(self): url="test", token="token", org="org", timeout=60000 ), metadata=MetadataServiceSettings(), + prometheus=PrometheusSettings(endpoint_port=8080), ) - context = KGExporterContext( - metadata_client, k8s_client, influxdb_client, jsonld_config, settings - ) - context.start() - context.stop() + return settings diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index cd16825..16b6ce5 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -7,7 +7,7 @@ MetadataServiceSettings, ) from app.core.kg_builder import KGBuilderSettings, QuerySettings -from app.kgexporter_settings import KGExporterSettings +from app.kgexporter_settings import KGExporterSettings, PrometheusSettings from app.pydantic_yaml import from_yaml, to_yaml @@ -22,6 +22,7 @@ def test_dump_load_settings(self): url="test", token="token", org="org", timeout=60000 ), metadata=MetadataServiceSettings(), + prometheus=PrometheusSettings(endpoint_port=8080), ) with 
TemporaryDirectory("-pydantic", "test") as tmpdir: yaml_file = f"{tmpdir}/test.yaml" From e191bcfceac561629bf76f35c3b65c53801ddf9d Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 14 Jun 2024 10:59:40 +0200 Subject: [PATCH 31/61] HHT-669: KG Builder tick --- .../slice_glaciation-test-master01_80.jsonld | 8 ++++---- .../slice_glaciation-test-master01_80.jsonld | 8 ++++---- .../slice_glaciation-test-worker01_80.jsonld | 8 ++++---- app/core/kg_builder.py | 15 +++++++++++++-- app/core/test_kg_builder.py | 7 ++++++- app/core/test_kgslice_assembler.py | 4 ++-- app/kgexporter_context.py | 3 +++ app/kgexporter_context_builder.py | 7 +++++-- app/test_kgexporter_context.py | 7 ++++++- app/util/__init__.py | 0 app/util/clock.py | 3 +++ app/util/clock_impl.py | 8 ++++++++ app/util/mock_clock.py | 14 ++++++++++++++ etc/config.yaml | 19 +++++++++++++++++++ 14 files changed, 91 insertions(+), 20 deletions(-) create mode 100644 app/util/__init__.py create mode 100644 app/util/clock.py create mode 100644 app/util/clock_impl.py create mode 100644 app/util/mock_clock.py create mode 100644 etc/config.yaml diff --git a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld index e8097c6..ea0f35a 100644 --- a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld @@ -15,7 +15,7 @@ "@id": "cluster:glaciation-test-master01.Energy.Index", "@type": "glc:Measurement", "glc:hasDescription": "Energy.Index", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 1001, "glc:hasID": "cluster:glaciation-test-master01.Energy.Index", "glc:measuredIn": "glc:Milliwatt", @@ -49,7 +49,7 @@ "@id": "cluster:glaciation-test-master01.CPU.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity CPU", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 4, "glc:hasID": "cluster:glaciation-test-master01.CPU.Capacity", "glc:measuredIn": "glc:Core", @@ -101,7 +101,7 @@ "@id": "cluster:glaciation-test-master01.RAM.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 16673939456, "glc:hasID": "cluster:glaciation-test-master01.RAM.Capacity", "glc:measuredIn": "glc:Bytes", @@ -131,7 +131,7 @@ "@id": "cluster:glaciation-test-master01.Storage.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 47266578354, "glc:hasID": "cluster:glaciation-test-master01.Storage.Capacity", "glc:measuredIn": "glc:Bytes", diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld index 1fd8e5e..e07f58b 100644 --- a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld @@ -15,7 +15,7 @@ "@id": "cluster:glaciation-test-master01.Energy.Index", "@type": "glc:Measurement", "glc:hasDescription": "Energy.Index", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 1001, "glc:hasID": "cluster:glaciation-test-master01.Energy.Index", "glc:measuredIn": "glc:Milliwatt", @@ -49,7 +49,7 @@ "@id": "cluster:glaciation-test-master01.CPU.Capacity", "@type": "glc:Measurement", "glc:hasDescription": 
"Capacity CPU", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 4, "glc:hasID": "cluster:glaciation-test-master01.CPU.Capacity", "glc:measuredIn": "glc:Core", @@ -101,7 +101,7 @@ "@id": "cluster:glaciation-test-master01.RAM.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 16673939456, "glc:hasID": "cluster:glaciation-test-master01.RAM.Capacity", "glc:measuredIn": "glc:Bytes", @@ -131,7 +131,7 @@ "@id": "cluster:glaciation-test-master01.Storage.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 47266578354, "glc:hasID": "cluster:glaciation-test-master01.Storage.Capacity", "glc:measuredIn": "glc:Bytes", diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld index 9c7c95a..2328d5c 100644 --- a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld @@ -15,7 +15,7 @@ "@id": "cluster:glaciation-test-worker01.Energy.Index", "@type": "glc:Measurement", "glc:hasDescription": "Energy.Index", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 1001, "glc:hasID": "cluster:glaciation-test-worker01.Energy.Index", "glc:measuredIn": "glc:Milliwatt", @@ -49,7 +49,7 @@ "@id": "cluster:glaciation-test-worker01.CPU.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity CPU", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 4, "glc:hasID": "cluster:glaciation-test-worker01.CPU.Capacity", "glc:measuredIn": "glc:Core", @@ -106,7 +106,7 @@ "@id": "cluster:glaciation-test-worker01.RAM.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 16673947648, "glc:hasID": "cluster:glaciation-test-worker01.RAM.Capacity", "glc:measuredIn": "glc:Bytes", @@ -136,7 +136,7 @@ "@id": "cluster:glaciation-test-worker01.Storage.Capacity", "@type": "glc:Measurement", "glc:hasDescription": "Capacity", - "glc:hasTimestamp": 1, + "glc:hasTimestamp": 1000, "glc:hasValue": 47266578354, "glc:hasID": "cluster:glaciation-test-worker01.Storage.Capacity", "glc:measuredIn": "glc:Bytes", diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index fd016f3..c7190f1 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -13,6 +13,7 @@ from app.core.slice_for_node_strategy import SliceForNodeStrategy from app.core.slice_strategy import SliceStrategy from app.core.types import DKGSlice, MetricSnapshot +from app.util.clock import Clock class QuerySettings(BaseSettings): @@ -40,6 +41,7 @@ class KGBuilder: def __init__( self, running: asyncio.Event, + clock: Clock, queue: AsyncQueue[DKGSlice], k8s_client: K8SClient, kg_repository: KGRepository, @@ -47,6 +49,7 @@ def __init__( settings: KGBuilderSettings, ): self.running = running + self.clock = clock self.k8s_client = k8s_client self.queue = queue self.kg_repository = kg_repository @@ -57,7 +60,8 @@ def __init__( async def run(self) -> None: while self.running.is_set(): - now = 1 + now_seconds = self.clock.now_seconds() + now = now_seconds * 1000 ( cluster_snapshot, pod_metrics, @@ -91,4 +95,11 @@ async def run(self) -> None: now=now, slice_id=slice_id, inputs=slice_inputs ) self.queue.put_nowait(slice) - await 
asyncio.sleep(30) + + sleep_seconds = ( + now_seconds + + self.settings.builder_tick_seconds + - self.clock.now_seconds() + ) + if sleep_seconds > 0: + await asyncio.sleep(sleep_seconds) diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 39394d0..faf2351 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -15,9 +15,12 @@ from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import DKGSlice, KGSliceId from app.kg.inmemory_graph import InMemoryGraph +from app.util.clock import Clock +from app.util.mock_clock import MockClock class KGBuilderTest(TestCase, TestGraphFixture, SnapshotTestBase): + clock: Clock client: MockMetadataServiceClient k8s_client: MockK8SClient influxdb_client: MockInfluxDBClient @@ -28,6 +31,7 @@ class KGBuilderTest(TestCase, TestGraphFixture, SnapshotTestBase): def setUp(self) -> None: self.maxDiff = None + self.clock = MockClock() self.client = MockMetadataServiceClient() self.influxdb_client = MockInfluxDBClient() self.queue = AsyncQueue() @@ -49,7 +53,7 @@ def test_build_minimal(self) -> None: slice = self.wait_for_slice(2) - self.assertEqual(slice.timestamp, 1) + self.assertEqual(slice.timestamp, 1000) self.assertEqual(slice.slice_id, KGSliceId("glaciation-test-master01", 80)) self.assertNotEqual(slice.graph, InMemoryGraph()) self.assert_graph(slice.graph, "minimal", slice.slice_id) @@ -59,6 +63,7 @@ def create_builder(self) -> KGBuilder: influxdb_repository = MetricRepository(self.influxdb_client) return KGBuilder( self.running_event, + self.clock, self.queue, self.k8s_client, repository, diff --git a/app/core/test_kgslice_assembler.py b/app/core/test_kgslice_assembler.py index 4fafebe..5fd8f98 100644 --- a/app/core/test_kgslice_assembler.py +++ b/app/core/test_kgslice_assembler.py @@ -13,7 +13,7 @@ def setUp(self): self.maxDiff = None def test_assemble_empty(self) -> None: - now = 1 + now = 1000 slice_id = KGSliceId("127.0.0.1", 80) inputs = self.get_inputs("empty") assembler = KGSliceAssembler() @@ -28,7 +28,7 @@ def test_assemble_empty(self) -> None: self.assert_graph(actual.graph, "empty", slice_id) def test_assemble_minimal(self) -> None: - now = 1 + now = 1000 slice_id = KGSliceId("glaciation-test-master01", 80) inputs = self.get_inputs("minimal") assembler = KGSliceAssembler() diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 12b2cbd..ba1df9c 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -17,6 +17,7 @@ from app.core.types import DKGSlice from app.kgexporter_settings import KGExporterSettings from app.serialize.jsonld_configuration import JsonLDConfiguration +from app.util.clock import Clock class KGExporterContext: @@ -32,6 +33,7 @@ class KGExporterContext: def __init__( self, + clock: Clock, metadata_client: MetadataServiceClient, k8s_client: K8SClient, influxdb_client: InfluxDBClient, @@ -46,6 +48,7 @@ def __init__( self.dkg_slice_store = DKGSliceStore() self.builder = KGBuilder( self.running, + clock, self.queue, k8s_client, kg_repository, diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index 176df61..25ed3fe 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -14,8 +14,9 @@ ) from app.core.kg_builder import KGBuilderSettings, QuerySettings from app.kgexporter_context import KGExporterContext -from app.kgexporter_settings import KGExporterSettings +from app.kgexporter_settings import KGExporterSettings, PrometheusSettings from 
app.serialize.jsonld_configuration import JsonLDConfiguration
+from app.util.clock_impl import ClockImpl
 
 
 class KGExporterContextBuilder:
@@ -26,13 +27,14 @@ def __init__(self, args: List[str]):
 
     def build(self) -> KGExporterContext:
         settings = self.get_settings()
+        clock = ClockImpl()
         metadata_client = MetadataServiceClientImpl(settings.metadata)
         k8s_client = K8SClientImpl(settings.k8s)
         influxdb_client = InfluxDBClientImpl(settings.influxdb)
         jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set())
 
         context = KGExporterContext(
-            metadata_client, k8s_client, influxdb_client, jsonld_config, settings
+            clock, metadata_client, k8s_client, influxdb_client, jsonld_config, settings
         )
         return context
 
@@ -46,6 +48,7 @@ def get_settings(self) -> KGExporterSettings:
                 url="test", token="token", org="org", timeout=60000
             ),
             metadata=MetadataServiceSettings(),
+            prometheus=PrometheusSettings(),
         )
 
     def parse(self):
diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py
index d9497f7..6c8b778 100644
--- a/app/test_kgexporter_context.py
+++ b/app/test_kgexporter_context.py
@@ -19,9 +19,12 @@
 from app.kgexporter_context import KGExporterContext
 from app.kgexporter_settings import KGExporterSettings, PrometheusSettings
 from app.serialize.jsonld_configuration import JsonLDConfiguration
+from app.util.clock import Clock
+from app.util.mock_clock import MockClock
 
 
 class KGExporterContextTest(TestCase, SnapshotTestBase):
+    clock: Clock
     metadata_client: MockMetadataServiceClient
     k8s_client: MockK8SClient
     influxdb_client: MockInfluxDBClient
@@ -30,12 +33,14 @@ class KGExporterContextTest(TestCase, SnapshotTestBase):
     context: KGExporterContext
 
     def setUp(self) -> None:
+        self.clock = MockClock()
         self.metadata_client = MockMetadataServiceClient()
         self.k8s_client = MockK8SClient()
         self.influxdb_client = MockInfluxDBClient()
         self.jsonld_config = self.get_test_jsonld_config()
         self.settings = self.test_kg_exporter_settings()
         self.context = KGExporterContext(
+            self.clock,
             self.metadata_client,
             self.k8s_client,
             self.influxdb_client,
@@ -85,7 +90,7 @@ def assert_graphs(
     def test_kg_exporter_settings(self) -> KGExporterSettings:
         settings = KGExporterSettings(
             builder=KGBuilderSettings(
-                builder_tick_seconds=1, node_port=80, queries=QuerySettings()
+                builder_tick_seconds=60, node_port=80, queries=QuerySettings()
             ),
             k8s=K8SSettings(in_cluster=True),
             influxdb=InfluxDBSettings(
diff --git a/app/util/__init__.py b/app/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/util/clock.py b/app/util/clock.py
new file mode 100644
index 0000000..557bf60
--- /dev/null
+++ b/app/util/clock.py
@@ -0,0 +1,3 @@
+class Clock:
+    def now_seconds(self) -> int:
+        raise NotImplementedError
diff --git a/app/util/clock_impl.py b/app/util/clock_impl.py
new file mode 100644
index 0000000..f903fb8
--- /dev/null
+++ b/app/util/clock_impl.py
@@ -0,0 +1,8 @@
+import time
+
+from app.util.clock import Clock
+
+
+class ClockImpl(Clock):
+    def now_seconds(self) -> int:
+        return int(time.time())
diff --git a/app/util/mock_clock.py b/app/util/mock_clock.py
new file mode 100644
index 0000000..d320738
--- /dev/null
+++ b/app/util/mock_clock.py
@@ -0,0 +1,14 @@
+from app.util.clock import Clock
+
+
+class MockClock(Clock):
+    seconds: int
+
+    def __init__(self):
+        self.seconds = 1
+
+    def set_seconds(self, seconds: int) -> None:
+        self.seconds = seconds
+
+    def now_seconds(self) -> int:
+        return self.seconds
diff --git a/etc/config.yaml b/etc/config.yaml
new file mode 100644
index 0000000..210019d
--- /dev/null
+++ b/etc/config.yaml
@@ -0,0 +1,19 @@
+builder:
+  builder_tick_seconds: 1
+  node_port: 80
+  queries:
+    node_queries: []
+    pod_queries: []
+    workload_queries: []
+influxdb:
+  org: org
+  timeout: 60000
+  token: token
+  url: test
+k8s:
+  in_cluster: true
+metadata:
+  metadata_service_push_period_sec: 60
+  metadata_service_url: metadata-service
+prometheus:
+  endpoint_port: 8080
From 9f53df0d4457ed14365c4340d17aae19961eec9b Mon Sep 17 00:00:00 2001
From: ktatarnikov
Date: Fri, 14 Jun 2024 12:34:03 +0200
Subject: [PATCH 32/61] HHT-669: Context Builder and arg parsing

---
 app/kgexporter_context_builder.py      | 87 ++++++++++++++------------
 app/main.py                            |  9 +--
 app/test_kgexporter_context_builder.py | 27 ++++++++
 3 files changed, 79 insertions(+), 44 deletions(-)
 create mode 100644 app/test_kgexporter_context_builder.py

diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py
index 25ed3fe..346e7ba 100644
--- a/app/kgexporter_context_builder.py
+++ b/app/kgexporter_context_builder.py
@@ -1,68 +1,75 @@
-from typing import List
+from typing import List, Optional, Tuple
 
 import argparse
+from argparse import Namespace
+from io import StringIO
 
 from app.clients.influxdb.influxdb_client_impl import InfluxDBClientImpl
-from app.clients.influxdb.influxdb_settings import InfluxDBSettings
 from app.clients.k8s.k8s_client_impl import K8SClientImpl
-from app.clients.k8s.k8s_settings import K8SSettings
 from app.clients.metadata_service.metadata_service_client_impl import (
     MetadataServiceClientImpl,
 )
-from app.clients.metadata_service.metadata_service_settings import (
-    MetadataServiceSettings,
-)
-from app.core.kg_builder import KGBuilderSettings, QuerySettings
 from app.kgexporter_context import KGExporterContext
-from app.kgexporter_settings import KGExporterSettings, PrometheusSettings
+from app.kgexporter_settings import KGExporterSettings
+from app.pydantic_yaml import from_yaml
 from app.serialize.jsonld_configuration import JsonLDConfiguration
 from app.util.clock_impl import ClockImpl
 
 
 class KGExporterContextBuilder:
-    args: List[str]
-
-    def __init__(self, args: List[str]):
-        self.args = args
-
-    def build(self) -> KGExporterContext:
-        settings = self.get_settings()
-        clock = ClockImpl()
-        metadata_client = MetadataServiceClientImpl(settings.metadata)
-        k8s_client = K8SClientImpl(settings.k8s)
-        influxdb_client = InfluxDBClientImpl(settings.influxdb)
-        jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set())
+    settings: KGExporterSettings
 
-        context = KGExporterContext(
-            clock, metadata_client, k8s_client, influxdb_client, jsonld_config, settings
-        )
-        return context
+    def __init__(self, settings: KGExporterSettings):
+        self.settings = settings
 
-    def get_settings(self) -> KGExporterSettings:
-        return KGExporterSettings(
-            builder=KGBuilderSettings(
-                builder_tick_seconds=1, node_port=80, queries=QuerySettings()
-            ),
-            k8s=K8SSettings(in_cluster=True),
-            influxdb=InfluxDBSettings(
-                url="test", token="token", org="org", timeout=60000
-            ),
-            metadata=MetadataServiceSettings(),
-            prometheus=PrometheusSettings(),
-        )
+    @staticmethod
+    def from_args(args: List[str]) -> Optional["KGExporterContextBuilder"]:
+        is_success, config_or_msg = KGExporterContextBuilder.parse_args(args)
+        if is_success:
+            settings: KGExporterSettings = from_yaml(config_or_msg, KGExporterSettings)  # type: ignore
+            return KGExporterContextBuilder(settings)
+        else:
+            print(config_or_msg)
+            return None
 
-    def parse(self):
+    @staticmethod
+    def parse_args(args: List[str]) -> Tuple[bool, str]:
         parser = argparse.ArgumentParser(
-            description="Kubernetes knowledge graph exporter."
+            description="Kubernetes knowledge graph exporter.", exit_on_error=False
         )
         parser.add_argument(
             "--config",
             dest="config",
             action="store",
             help="Configuration of the KGExporter",
+            required=True,
         )
-        args = parser.parse_args()
-        print(args)
+
+        try:
+            ns: Namespace = parser.parse_args(args)
+            return True, ns.config
+        except argparse.ArgumentError as e:
+            message = StringIO()
+            parser.print_help(message)
+            message.write(str(e))
+            return False, message.getvalue()
+
+    def build(self) -> KGExporterContext:
+        clock = ClockImpl()
+        metadata_client = MetadataServiceClientImpl(self.settings.metadata)
+        k8s_client = K8SClientImpl(self.settings.k8s)
+        influxdb_client = InfluxDBClientImpl(self.settings.influxdb)
+        jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set())
+
+        context = KGExporterContext(
+            clock,
+            metadata_client,
+            k8s_client,
+            influxdb_client,
+            jsonld_config,
+            self.settings,
+        )
+        return context
 
 # logger = logging.getLogger()
 # logger.setLevel(logging.INFO)
diff --git a/app/main.py b/app/main.py
index de6fec1..f5213e1 100644
--- a/app/main.py
+++ b/app/main.py
@@ -6,10 +6,11 @@
 def main() -> None:
     signal.signal(signal.SIGINT, signal.SIG_DFL)
 
-    builder = KGExporterContextBuilder(sys.argv[1:])
-    context = builder.build()
-    context.start()
-    context.wait_for_termination()
+    builder = KGExporterContextBuilder.from_args(sys.argv[1:])
+    if builder:
+        context = builder.build()
+        context.start()
+        context.wait_for_termination()
 
 
 if __name__ == "__main__":
diff --git a/app/test_kgexporter_context_builder.py b/app/test_kgexporter_context_builder.py
new file mode 100644
index 0000000..27d097b
--- /dev/null
+++ b/app/test_kgexporter_context_builder.py
@@ -0,0 +1,27 @@
+from unittest import TestCase
+
+from app.kgexporter_context_builder import KGExporterContextBuilder
+
+
+class KGExporterContextBuilderTest(TestCase):
+    def test_success(self) -> None:
+        is_success, path = KGExporterContextBuilder.parse_args(
+            ["--config", "./etc/config.yaml"]
+        )
+        self.assertTrue(is_success)
+        self.assertEqual(path, "./etc/config.yaml")
+
+    def test_failure(self) -> None:
+        is_success, msg = KGExporterContextBuilder.parse_args(["--config"])
+        self.assertFalse(is_success)
+        self.assertEqual(
+            msg,
+            (
+                "usage: python -m unittest [-h] --config CONFIG\n\n"
+                "Kubernetes knowledge graph exporter.\n\n"
+                + "options:\n"
+                + " -h, --help show this help message and exit\n"
+                + " --config CONFIG Configuration of the KGExporter\n"
+                + "argument --config: expected one argument"
+            ),
+        )
From f64232e762b827d8656493a48f7a01abdb26dae8 Mon Sep 17 00:00:00 2001
From: ktatarnikov
Date: Fri, 14 Jun 2024 12:43:50 +0200
Subject: [PATCH 33/61] HHT-669: Context Builder and arg parsing

---
 app/test_kgexporter_context_builder.py |   2 +-
 poetry.lock                            | 749 ++++++++++++++++++++++++-
 pyproject.toml                         |   1 +
 3 files changed, 730 insertions(+), 22 deletions(-)

diff --git a/app/test_kgexporter_context_builder.py b/app/test_kgexporter_context_builder.py
index 27d097b..3eb4b70 100644
--- a/app/test_kgexporter_context_builder.py
+++ b/app/test_kgexporter_context_builder.py
@@ -17,7 +17,7 @@ def test_failure(self) -> None:
         self.assertEqual(
             msg,
             (
-                "usage: python -m unittest [-h] --config CONFIG\n\n"
+                "usage: pytest [-h] --config CONFIG\n\n"
                 "Kubernetes knowledge graph exporter.\n\n"
                 + "options:\n"
                 + " -h, --help show this help message and exit\n"
                 + " --config CONFIG Configuration of the KGExporter\n"
                 + "argument --config: expected one argument"
             ),
         )
diff --git a/poetry.lock b/poetry.lock
index 8c76d4f..f1fa9e2 100644
--- a/poetry.lock
+++
b/poetry.lock @@ -393,6 +393,106 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "contourpy" +version = "1.2.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, + {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, + {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, + {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, + {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, + {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, + {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, + {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, + {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, + {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, + {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, + {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, + {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, + {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, + {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, + {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, + {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, + {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, + {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, + {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, + {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, + {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, + {file = "contourpy-1.2.1.tar.gz", hash = 
"sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] + +[[package]] +name = "cycler" +version = "0.12.1" +description = "Composable style cycles" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, +] + +[package.extras] +docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] +tests = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "dateparser" +version = "1.2.0" +description = "Date parsing library designed to parse dates from HTML pages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, + {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = "*" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" +tzlocal = "*" + +[package.extras] +calendars = ["convertdate", "hijri-converter"] +fasttext = ["fasttext"] +langdetect = ["langdetect"] + [[package]] name = "distlib" version = "0.3.8" @@ -406,18 +506,18 @@ files = [ [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"}, + {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -436,6 +536,71 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.2.0,<3.3.0" +[[package]] +name = "fonttools" +version = "4.53.0" +description = "Tools to manipulate font files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.53.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20"}, + {file = "fonttools-4.53.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d"}, + {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6"}, + {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5"}, + {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9"}, + {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca"}, + {file = "fonttools-4.53.0-cp310-cp310-win32.whl", hash = "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068"}, + {file = "fonttools-4.53.0-cp310-cp310-win_amd64.whl", hash = "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68"}, + {file = "fonttools-4.53.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec"}, + {file = "fonttools-4.53.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749"}, + {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161"}, + {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee"}, + {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6"}, + {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e"}, + {file = 
"fonttools-4.53.0-cp311-cp311-win32.whl", hash = "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005"}, + {file = "fonttools-4.53.0-cp311-cp311-win_amd64.whl", hash = "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796"}, + {file = "fonttools-4.53.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a"}, + {file = "fonttools-4.53.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4"}, + {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06"}, + {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d"}, + {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109"}, + {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2"}, + {file = "fonttools-4.53.0-cp312-cp312-win32.whl", hash = "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea"}, + {file = "fonttools-4.53.0-cp312-cp312-win_amd64.whl", hash = "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380"}, + {file = "fonttools-4.53.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12"}, + {file = "fonttools-4.53.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4"}, + {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce"}, + {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f"}, + {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206"}, + {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd"}, + {file = "fonttools-4.53.0-cp38-cp38-win32.whl", hash = "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af"}, + {file = "fonttools-4.53.0-cp38-cp38-win_amd64.whl", hash = "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b"}, + {file = "fonttools-4.53.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac"}, + {file = "fonttools-4.53.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d"}, + {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1"}, + {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f"}, + {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64"}, + {file = 
"fonttools-4.53.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0"}, + {file = "fonttools-4.53.0-cp39-cp39-win32.whl", hash = "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9"}, + {file = "fonttools-4.53.0-cp39-cp39-win_amd64.whl", hash = "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2"}, + {file = "fonttools-4.53.0-py3-none-any.whl", hash = "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4"}, + {file = "fonttools-4.53.0.tar.gz", hash = "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002"}, +] + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "pycairo", "scipy"] +lxml = ["lxml (>=4.0)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.1.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + [[package]] name = "frozenlist" version = "1.4.1" @@ -524,13 +689,13 @@ files = [ [[package]] name = "google-auth" -version = "2.29.0" +version = "2.30.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"}, + {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"}, ] [package.dependencies] @@ -556,6 +721,20 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "httmock" +version = "1.4.0" +description = "A mocking library for requests." 
+optional = false +python-versions = "*" +files = [ + {file = "httmock-1.4.0-py3-none-any.whl", hash = "sha256:13e6c63f135a928e15d386af789a2890efb03e0e280f29bdc9961f3f0dc34cb9"}, + {file = "httmock-1.4.0.tar.gz", hash = "sha256:44eaf4bb59cc64cd6f5d8bf8700b46aa3097cc5651b9bc85c527dfbc71792f41"}, +] + +[package.dependencies] +requests = ">=1.0.0" + [[package]] name = "httpcore" version = "1.0.5" @@ -702,6 +881,119 @@ files = [ [package.dependencies] ply = "*" +[[package]] +name = "kiwisolver" +version = "1.4.5" +description = "A fast implementation of the Cassowary constraint solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = 
"sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, +] + [[package]] name = "kubernetes" version = "29.0.0" @@ -746,6 +1038,58 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] +[[package]] +name = 
"matplotlib" +version = "3.9.0" +description = "Python plotting package" +optional = false +python-versions = ">=3.9" +files = [ + {file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"}, + {file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"}, + {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"}, + {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"}, + {file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"}, + {file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"}, + {file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"}, + {file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"}, + {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"}, + {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"}, + {file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"}, + {file = "matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"}, + {file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"}, + {file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"}, + {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"}, + {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"}, + {file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"}, + {file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"}, + {file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"}, + {file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"}, + {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"}, + {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"}, + {file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"}, + {file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"}, + {file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.3.1" +numpy = ">=1.23" +packaging = ">=20.0" +pillow = ">=8" +pyparsing = ">=2.3.1" +python-dateutil = ">=2.7" + +[package.extras] +dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] + [[package]] name = "mccabe" version = "0.7.0" @@ -924,6 +1268,51 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = 
"numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = 
"numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -942,15 +1331,87 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = 
"pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] 
+hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pathspec" version = "0.12.1" @@ -962,6 +1423,92 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = 
"pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = 
"pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + [[package]] name = "platformdirs" version = "4.2.2" @@ -1022,6 +1569,25 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prometheus-api-client" +version = "0.5.5" +description = "A small python api to collect data from prometheus" +optional = false +python-versions = "*" +files = [ + {file = "prometheus-api-client-0.5.5.tar.gz", hash = "sha256:59449c4be0485ea5a2dfbbed2482eb0a3eeac8c131b3efc866df4c9a0c403bb7"}, + {file = "prometheus_api_client-0.5.5-py3-none-any.whl", hash = "sha256:e65ad16c262da15ed2d2288b521a80278feed2d31e7f0e732a935b67af8e24b4"}, +] + +[package.dependencies] +dateparser = "*" +httmock = "*" +matplotlib = "*" +numpy = "*" +pandas = ">=1.4.0" +requests = "*" + [[package]] name = "prometheus-client" version = "0.20.0" @@ -1074,13 +1640,13 @@ files = [ [[package]] name = "pydantic" -version = "2.7.3" +version = "2.7.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"}, - {file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"}, + {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, ] [package.dependencies] @@ -1184,13 +1750,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.3.0" +version = "2.3.3" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.3.0-py3-none-any.whl", hash = "sha256:26eeed27370a9c5e3f64e4a7d6602573cbedf05ed940f1d5b11c3f178427af7a"}, - {file = "pydantic_settings-2.3.0.tar.gz", hash = "sha256:78db28855a71503cfe47f39500a1dece523c640afd5280edb5c5c9c9cfa534c9"}, + {file = "pydantic_settings-2.3.3-py3-none-any.whl", hash = "sha256:e4ed62ad851670975ec11285141db888fd24947f9440bd4380d7d8788d4965de"}, + {file = "pydantic_settings-2.3.3.tar.gz", hash = "sha256:87fda838b64b5039b970cd47c3e8a1ee460ce136278ff672980af21516f6e6ce"}, ] [package.dependencies] @@ -1212,6 +1778,20 @@ files = 
[ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pytest" version = "7.4.4" @@ -1292,6 +1872,17 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -1366,6 +1957,94 @@ files = [ [package.dependencies] typing-extensions = ">=4.1.1,<5.0.0" +[[package]] +name = "regex" +version = "2024.5.15" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.8" +files = [ + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash 
= "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, + {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, + {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, + {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, + {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, + {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, + {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, + {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, + {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, + {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, + {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, + {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -1480,15 +2159,43 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.1" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = 
">=3.8" files = [ - {file = "typing_extensions-4.12.1-py3-none-any.whl", hash = "sha256:6024b58b69089e5a89c347397254e35f1bf02a907728ec7fee9bf0fe837d203a"}, - {file = "typing_extensions-4.12.1.tar.gz", hash = "sha256:915f5e35ff76f56588223f15fdd5938f9a1cf9195c0de25130c627e4d597f6d1"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, ] +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "urllib3" version = "1.26.18" @@ -1759,4 +2466,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "0c86c4ff0e349b323c4d82ade4b911d92dd667a438674d074983ba706d5cc90d" +content-hash = "7fbd3ca1216cdaac0216b8baf06155b29eb660a4dc51de99025e00daf7b31aa9" diff --git a/pyproject.toml b/pyproject.toml index dc028f8..a8c02d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ aiocsv = "1.3.2" aiohttp = "3.9.3" loguru = "0.7.2" prometheus-client = "0.20.0" +prometheus_api_client = "0.5.5" [tool.poetry.group.dev.dependencies] black = "^23.12" From 1e0c3a06332619f8016c9032eedf1afd1a17b92c Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 14 Jun 2024 12:59:52 +0200 Subject: [PATCH 34/61] HHT-669: KG Exporter Context tests - more time to succeed on the github --- app/test_kgexporter_context.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 6c8b778..1c91044 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -57,7 +57,7 @@ def test_end_to_end_minimal(self) -> None: ) self.context.start() - inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 2, 1) + inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 5, 1) self.assert_graphs("minimal", inserts) @@ -73,7 +73,7 @@ def test_end_to_end_multinode(self) -> None: ) self.context.start() - inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 2, 2) + inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 5, 2) self.assert_graphs("multinode", inserts) From 66937cfc70c0c6dc4f0368dd9b914bc78ee7053d Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 14 Jun 2024 13:29:28 +0200 Subject: [PATCH 35/61] HHT-669: helm chart - mount config.yaml --- Dockerfile | 5 +++-- charts/app/templates/cm.yaml | 9 +++++++++ charts/app/templates/deployment.yaml | 11 
++++++++--- charts/app/values.yaml | 20 +++++++++++++++++++- 4 files changed, 39 insertions(+), 6 deletions(-) create mode 100644 charts/app/templates/cm.yaml diff --git a/Dockerfile b/Dockerfile index 7c55351..d48f24b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,8 @@ RUN pip install --no-cache-dir poetry \ && poetry install --no-root --without dev,test \ && rm -rf $(poetry config cache-dir)/{cache,artifacts} +COPY ./etc/config.yaml /code COPY ./app /code/app -WORKDIR /code/app +WORKDIR /code -CMD ["python", "kg_exporter.py", "--incluster"] +CMD ["python", "-m", "app.main", "--config", "./config.yaml"] diff --git a/charts/app/templates/cm.yaml b/charts/app/templates/cm.yaml new file mode 100644 index 0000000..532702c --- /dev/null +++ b/charts/app/templates/cm.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "app.fullname" . }}-config + labels: + {{- include "app.labels" . | nindent 4 }} +data: + config.yaml: |- + {{- .Values.settings | toYaml | nindent 4 }} diff --git a/charts/app/templates/deployment.yaml b/charts/app/templates/deployment.yaml index 4dc8a12..20c9657 100644 --- a/charts/app/templates/deployment.yaml +++ b/charts/app/templates/deployment.yaml @@ -36,9 +36,14 @@ spec: imagePullPolicy: {{ .Values.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} - env: - - name: METADATA_SERVICE_URL - value: {{ .Values.settings.metadata_service_url }} + volumeMounts: + - name: configuration + mountPath: /code/config.yaml + subPath: config.yaml + volumes: + - name: configuration + configMap: + name: {{ include "app.fullname" . }}-config {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} diff --git a/charts/app/values.yaml b/charts/app/values.yaml index 59b923a..d1860a2 100644 --- a/charts/app/values.yaml +++ b/charts/app/values.yaml @@ -25,4 +25,22 @@ securityContext: {} resources: {} settings: - metadata_service_url: "http://metadata-service.knowledge-graph.svc.cluster.local/api/v0/graph" + builder: + builder_tick_seconds: 60 + node_port: 80 + queries: + node_queries: [] + pod_queries: [] + workload_queries: [] + influxdb: + org: org + timeout: 60000 + token: token + url: test + k8s: + in_cluster: true + metadata: + metadata_service_push_period_sec: 60 + metadata_service_url: metadata-service + prometheus: + endpoint_port: 8080 From 4aa68a1dcb6aed6da90e152517f0ea483fd3f561 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 14 Jun 2024 13:36:47 +0200 Subject: [PATCH 36/61] HHT-669: release specific cluster role and role binding --- charts/app/templates/clusterrole.yaml | 2 +- charts/app/templates/clusterrolebinding.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/charts/app/templates/clusterrole.yaml b/charts/app/templates/clusterrole.yaml index 0425da1..f59f2b2 100644 --- a/charts/app/templates/clusterrole.yaml +++ b/charts/app/templates/clusterrole.yaml @@ -1,7 +1,7 @@ kind: ClusterRole apiVersion: rbac.authorization.k8s.io/v1 metadata: - name: kg-exporter-role + name: {{ include "app.fullname" . 
}}-role rules: - apiGroups: ["apps", "batch"] resources: ["deployments", "statefulsets", "jobs"] diff --git a/charts/app/templates/clusterrolebinding.yaml b/charts/app/templates/clusterrolebinding.yaml index 23cc89c..ac9b6ee 100644 --- a/charts/app/templates/clusterrolebinding.yaml +++ b/charts/app/templates/clusterrolebinding.yaml @@ -1,12 +1,12 @@ kind: ClusterRoleBinding apiVersion: rbac.authorization.k8s.io/v1 metadata: - name: kg-exporter-role-binding + name: {{ include "app.fullname" . }}-binding subjects: - kind: ServiceAccount name: {{ include "app.fullname" . }} namespace: {{ .Release.Namespace }} roleRef: kind: ClusterRole - name: kg-exporter-role + name: {{ include "app.fullname" . }}-role apiGroup: rbac.authorization.k8s.io From b495357853ecc30f565396d264d7f0660c3cdc30 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 14 Jun 2024 14:17:21 +0200 Subject: [PATCH 37/61] HHT-669: integration testing --- app/clients/k8s/k8s_client_impl.py | 11 ++- .../mock_metadata_service_client.py | 1 + app/core/kg_builder.py | 93 +++++++++++-------- app/core/kg_updater.py | 36 ++++--- app/core/resource_snapshot_index.py | 33 +++++++ app/core/slice_for_node_strategy.py | 38 ++++---- app/core/test_kg_builder.py | 1 - app/core/test_kg_updater.py | 1 - app/kgexporter_context.py | 18 ++-- app/kgexporter_context_builder.py | 5 - app/main.py | 1 + 11 files changed, 148 insertions(+), 90 deletions(-) create mode 100644 app/core/resource_snapshot_index.py diff --git a/app/clients/k8s/k8s_client_impl.py b/app/clients/k8s/k8s_client_impl.py index 36e8414..85495e4 100644 --- a/app/clients/k8s/k8s_client_impl.py +++ b/app/clients/k8s/k8s_client_impl.py @@ -30,10 +30,10 @@ async def get_deployments(self) -> List[Dict[str, Any]]: return await self.get_resource("Deployment") async def get_replicasets(self) -> List[Dict[str, Any]]: - return await self.get_resource("Replicaset") + return await self.get_resource("ReplicaSet") async def get_daemonsets(self) -> List[Dict[str, Any]]: - return await self.get_resource("Daemonset") + return await self.get_resource("DaemonSet") async def get_statefullsets(self) -> List[Dict[str, Any]]: return await self.get_resource("StatefulSet") @@ -42,7 +42,12 @@ async def get_jobs(self) -> List[Dict[str, Any]]: return await self.get_resource("Job") async def get_cluster_info(self) -> Dict[str, Any]: - raise NotImplementedError("get_cluster_info() is not implemented") + configmap_api = self.client.resources.get(api_version="v1", kind="ConfigMap") + results = configmap_api.get(namespace="kube-system", name="kubeadm-config") + if results: + return results.to_dict() # type: ignore + else: + return {} async def get_resource(self, kind: str) -> List[Dict[str, Any]]: api = self.client.resources.get(api_version="v1", kind=kind) diff --git a/app/clients/metadata_service/mock_metadata_service_client.py b/app/clients/metadata_service/mock_metadata_service_client.py index 2094b2a..987713e 100644 --- a/app/clients/metadata_service/mock_metadata_service_client.py +++ b/app/clients/metadata_service/mock_metadata_service_client.py @@ -74,6 +74,7 @@ async def insert(self, host_and_port: HostId, message: SerializedGraph) -> None: self.hosts[host_and_port] = HostInteractions() self.hosts[host_and_port].add_insert(message) + # TODO remove def wait_for_inserts( self, seconds: int, count: int ) -> List[Tuple[HostId, SerializedGraph]]: diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index c7190f1..8c8560a 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -29,7 +29,7 @@ 
class KGBuilderSettings(BaseSettings): class KGBuilder: - running: asyncio.Event + terminated: asyncio.Event k8s_client: K8SClient queue: AsyncQueue[DKGSlice] kg_repository: KGRepository @@ -40,7 +40,7 @@ class KGBuilder: def __init__( self, - running: asyncio.Event, + terminated: asyncio.Event, clock: Clock, queue: AsyncQueue[DKGSlice], k8s_client: K8SClient, @@ -48,7 +48,7 @@ def __init__( influxdb_repository: MetricRepository, settings: KGBuilderSettings, ): - self.running = running + self.terminated = terminated self.clock = clock self.k8s_client = k8s_client self.queue = queue @@ -59,42 +59,14 @@ def __init__( self.slice_assembler = KGSliceAssembler() async def run(self) -> None: - while self.running.is_set(): + logger.info("Builder started.") + while not self.terminated.is_set(): now_seconds = self.clock.now_seconds() - now = now_seconds * 1000 - ( - cluster_snapshot, - pod_metrics, - node_metrics, - workload_metrics, - ) = await asyncio.gather( - self.k8s_client.fetch_snapshot(), - self.influxdb_repository.query_many( - now, self.settings.queries.pod_queries - ), - self.influxdb_repository.query_many( - now, self.settings.queries.node_queries - ), - self.influxdb_repository.query_many( - now, self.settings.queries.workload_queries - ), - ) - metric_snapshot = MetricSnapshot( - list(zip(self.settings.queries.pod_queries, pod_metrics)), - list(zip(self.settings.queries.node_queries, node_metrics)), - list(zip(self.settings.queries.workload_queries, workload_metrics)), - ) - logger.debug(cluster_snapshot) - logger.debug(pod_metrics) - logger.debug(node_metrics) - logger.debug(workload_metrics) - - slices = self.slice_strategy.get_slices(cluster_snapshot, metric_snapshot) - for slice_id, slice_inputs in slices.items(): - slice = self.slice_assembler.assemble( - now=now, slice_id=slice_id, inputs=slice_inputs - ) - self.queue.put_nowait(slice) + + try: + await self.run_cycle(now_seconds) + except Exception as e: + logger.error(f"Builder error: {e}") sleep_seconds = ( now_seconds @@ -103,3 +75,48 @@ async def run(self) -> None: ) if sleep_seconds > 0: await asyncio.sleep(sleep_seconds) + + logger.info("Builder stopped.") + + async def run_cycle(self, now_seconds: int) -> None: + now = now_seconds * 1000 + ( + cluster_snapshot, + pod_metrics, + node_metrics, + workload_metrics, + ) = await asyncio.gather( + self.k8s_client.fetch_snapshot(), + self.influxdb_repository.query_many(now, self.settings.queries.pod_queries), + self.influxdb_repository.query_many( + now, self.settings.queries.node_queries + ), + self.influxdb_repository.query_many( + now, self.settings.queries.workload_queries + ), + ) + metric_snapshot = MetricSnapshot( + list(zip(self.settings.queries.pod_queries, pod_metrics)), + list(zip(self.settings.queries.node_queries, node_metrics)), + list(zip(self.settings.queries.workload_queries, workload_metrics)), + ) + logger.debug("Cluster snapshot: {size}", size=len(cluster_snapshot.cluster)) + logger.debug("Nodes: {size}", size=len(cluster_snapshot.nodes)) + logger.debug("Pods: {size}", size=len(cluster_snapshot.pods)) + logger.debug("Deployments: {size}", size=len(cluster_snapshot.deployments)) + logger.debug("ReplicaSets: {size}", size=len(cluster_snapshot.replicasets)) + logger.debug("DaemonSets: {size}", size=len(cluster_snapshot.daemonsets)) + logger.debug("StatefullSets: {size}", size=len(cluster_snapshot.statefullsets)) + logger.debug("Jobs: {size}", size=len(cluster_snapshot.jobs)) + logger.debug("NodeMetrics: {size}", size=len(metric_snapshot.node_metrics)) + 
logger.debug("PodMetrics: {size}", size=len(metric_snapshot.pod_metrics)) + + slices = self.slice_strategy.get_slices(cluster_snapshot, metric_snapshot) + logger.info("Slices produced: {size}", size=len(slices)) + logger.debug("Slices: {slices}", slices=set(slices.keys())) + for slice_id, slice_inputs in slices.items(): + logger.debug("Assembling slice: {slice_id}", slice_id=slice_id) + slice = self.slice_assembler.assemble( + now=now, slice_id=slice_id, inputs=slice_inputs + ) + self.queue.put_nowait(slice) diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index 4aea6d7..62c8a7c 100644 --- a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -10,27 +10,35 @@ class KGUpdater: queue: AsyncQueue[DKGSlice] kg_repository: KGRepository - running: asyncio.Event + terminated: asyncio.Event def __init__( self, - running: asyncio.Event, + terminated: asyncio.Event, queue: AsyncQueue[DKGSlice], kg_repository: KGRepository, ): self.queue = queue self.kg_repository = kg_repository - self.running = running + self.terminated = terminated async def run(self) -> None: - while self.running.is_set(): - slice = self.queue.get_nowait() - if slice: - logger.debug( - "updating slice {slice}, with timestamp {timestamp}", - slice=slice.slice_id, - timestamp=slice.timestamp, - ) - await self.kg_repository.update(slice.slice_id, slice.graph) - else: - await asyncio.sleep(0.5) + logger.info("Updater started.") + while not self.terminated.is_set(): + try: + await self.run_cycle() + except Exception as e: + logger.error(f"Updater error: {e}") + logger.info("Updater stopped.") + + async def run_cycle(self) -> None: + slice = self.queue.get_nowait() + if slice: + logger.info( + "updating slice {slice}, with timestamp {timestamp}", + slice=slice.slice_id, + timestamp=slice.timestamp, + ) + await self.kg_repository.update(slice.slice_id, slice.graph) + else: + await asyncio.sleep(0.5) diff --git a/app/core/resource_snapshot_index.py b/app/core/resource_snapshot_index.py new file mode 100644 index 0000000..cc664bf --- /dev/null +++ b/app/core/resource_snapshot_index.py @@ -0,0 +1,33 @@ +from typing import Any, Dict, List, Optional, Tuple, TypeAlias + +from app.clients.k8s.k8s_client import ResourceSnapshot + +Kind: TypeAlias = str +Name: TypeAlias = str + + +class ResourceSnapshotIndex: + index: Dict[Tuple[Kind, Name], Dict[str, Any]] + + def __init__(self): + self.index = dict() + + @staticmethod + def build(snapshot: ResourceSnapshot) -> "ResourceSnapshotIndex": + index = ResourceSnapshotIndex() + index.add("Node", snapshot.nodes) + index.add("Pod", snapshot.pods) + index.add("Deployment", snapshot.deployments) + index.add("ReplicaSet", snapshot.replicasets) + index.add("DaemonSet", snapshot.daemonsets) + index.add("StatefulSet", snapshot.statefullsets) + index.add("Job", snapshot.jobs) + return index + + def add(self, kind: str, resources: List[Dict[str, Any]]) -> None: + for resource in resources: + name = resource["metadata"]["name"] + self.index[(kind, name)] = resource + + def get_by(self, kind: str, name: str) -> Optional[Dict[str, Any]]: + return self.index.get((kind, name)) diff --git a/app/core/slice_for_node_strategy.py b/app/core/slice_for_node_strategy.py index 47b5afd..4acce52 100644 --- a/app/core/slice_for_node_strategy.py +++ b/app/core/slice_for_node_strategy.py @@ -3,12 +3,14 @@ from jsonpath_ng.ext import parse from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.resource_snapshot_index import ResourceSnapshotIndex from app.core.slice_strategy import SliceStrategy from 
app.core.types import KGSliceId, MetricSnapshot, SliceInputs ReferenceKind: TypeAlias = str +# TODO rename class SliceForNodeStrategy(SliceStrategy): node_port: int @@ -19,9 +21,9 @@ def get_slices( self, resources: ResourceSnapshot, metrics: MetricSnapshot ) -> Dict[KGSliceId, SliceInputs]: result: Dict[KGSliceId, SliceInputs] = dict() - + index = ResourceSnapshotIndex.build(resources) for node in resources.nodes: - slice_id, inputs = self.split_node(node, resources, metrics) + slice_id, inputs = self.split_node(node, index, resources, metrics) result[slice_id] = inputs return result @@ -29,6 +31,7 @@ def get_slices( def split_node( self, node: Dict[str, Any], + index: ResourceSnapshotIndex, src_resources: ResourceSnapshot, src_metrics: MetricSnapshot, ) -> Tuple[KGSliceId, SliceInputs]: @@ -37,20 +40,18 @@ def split_node( slice_resources = ResourceSnapshot(cluster=src_resources.cluster, nodes=[node]) slice_metrics = MetricSnapshot() - - self.add_workloads(node_hostname, slice_resources, src_resources) + self.add_workloads(node_hostname, index, slice_resources, src_resources) self.add_metrics(slice_resources, slice_metrics, src_metrics) return slice_id, SliceInputs(slice_resources, slice_metrics) def get_resource_name(self, resource: Dict[str, Any]) -> str: - for match in parse("$.metadata.name").find(resource): - return str(match.value) - raise Exception("Metadata does not contain name.") + return resource["metadata"]["name"] # type: ignore def add_workloads( self, node_hostname: str, + index: ResourceSnapshotIndex, slice_resources: ResourceSnapshot, src_resources: ResourceSnapshot, ) -> None: @@ -59,39 +60,36 @@ def add_workloads( if not hostname or hostname != node_hostname: continue slice_resources.pods.append(pod) - self.add_parent_resources(pod, slice_resources, src_resources) + self.add_parent_resources(pod, index, slice_resources, src_resources) def add_parent_resources( self, resource: Dict[str, Any], + index: ResourceSnapshotIndex, slice_resources: ResourceSnapshot, src_resources: ResourceSnapshot, ) -> None: for parent_kind, parent_identity in self.get_owner_references(resource): - src_found_resources = src_resources.find_resources_by_kind_and_identity( - parent_kind, parent_identity - ) - slice_resources.add_resources_by_kind(parent_kind, src_found_resources) - for found_resource in src_found_resources: + src_found_resource = index.get_by(parent_kind, parent_identity) + if src_found_resource: + slice_resources.add_resources_by_kind(parent_kind, [src_found_resource]) self.add_parent_resources( - found_resource, slice_resources, src_resources + src_found_resource, index, slice_resources, src_resources ) def get_owner_references( self, resource: Dict[str, Any] ) -> List[Tuple[ReferenceKind, str]]: - references_match = parse("$.metadata.ownerReferences").find(resource) - if len(references_match) == 0: + references_matches = resource["metadata"].get("ownerReferences") or [] + if len(references_matches) == 0: return [] return [ self.get_reference_id(reference_match) - for reference_match in references_match[0].value + for reference_match in references_matches ] def get_pod_hostname(self, pod: Dict[str, Any]) -> Optional[str]: - for match in parse("$.spec.nodeName").find(pod): - return str(match.value) - return None + return pod.get("spec").get("nodeName") # type: ignore def get_reference_id(self, reference: Dict[str, Any]) -> Tuple[str, str]: return reference["kind"], reference["name"] diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index faf2351..892e59b 
100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -37,7 +37,6 @@ def setUp(self) -> None: self.queue = AsyncQueue() self.k8s_client = MockK8SClient() self.running_event = asyncio.Event() - self.running_event.set() self.runner = asyncio.Runner() self.settings = KGBuilderSettings( builder_tick_seconds=1, node_port=80, queries=QuerySettings() diff --git a/app/core/test_kg_updater.py b/app/core/test_kg_updater.py index 546ce14..1d00f1e 100644 --- a/app/core/test_kg_updater.py +++ b/app/core/test_kg_updater.py @@ -23,7 +23,6 @@ def setUp(self) -> None: self.client = MockMetadataServiceClient() self.queue = AsyncQueue() self.running_event = asyncio.Event() - self.running_event.set() self.runner = asyncio.Runner() def test_kg_updater(self) -> None: diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index ba1df9c..8a33dee 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -3,6 +3,7 @@ import asyncio from wsgiref.simple_server import WSGIServer +from loguru import logger from prometheus_client import start_http_server from app.clients.influxdb.influxdb_client import InfluxDBClient @@ -26,7 +27,7 @@ class KGExporterContext: queue: AsyncQueue[DKGSlice] runner: asyncio.Runner dkg_slice_store: DKGSliceStore - running: asyncio.Event + terminated: asyncio.Event prometheus_server: WSGIServer tasks: List[asyncio.Task[Any]] settings: KGExporterSettings @@ -43,11 +44,11 @@ def __init__( self.settings = settings kg_repository = KGRepository(metadata_client, jsonld_config) influxdb_repository = MetricRepository(influxdb_client) - self.running = asyncio.Event() + self.terminated = asyncio.Event() self.queue = AsyncQueue[DKGSlice]() self.dkg_slice_store = DKGSliceStore() self.builder = KGBuilder( - self.running, + self.terminated, clock, self.queue, k8s_client, @@ -55,14 +56,14 @@ def __init__( influxdb_repository, self.settings.builder, ) - self.updater = KGUpdater(self.running, self.queue, kg_repository) + self.updater = KGUpdater(self.terminated, self.queue, kg_repository) self.runner = asyncio.Runner() self.tasks = [] def start(self) -> None: - if self.running.is_set(): + if self.terminated.is_set(): return - self.running.set() + self.terminated.clear() self.runner.run(self.run_tasks()) server, _ = start_http_server(self.settings.prometheus.endpoint_port) self.prometheus_server = server @@ -72,8 +73,9 @@ async def run_tasks(self) -> None: self.tasks.append(asyncio.create_task(self.updater.run())) def stop(self) -> None: - self.running.clear() + self.terminated.set() self.prometheus_server.shutdown() def wait_for_termination(self) -> None: - self.runner.run(self.running.wait()) + self.runner.run(self.terminated.wait()) + logger.info("Application terminated.") diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index 346e7ba..1de1004 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -70,8 +70,3 @@ def build(self) -> KGExporterContext: self.settings, ) return context - - # logger = logging.getLogger() - # logger.setLevel(logging.INFO) - # console_handler = logging.StreamHandler() - # logger.addHandler(console_handler) diff --git a/app/main.py b/app/main.py index f5213e1..99818d3 100644 --- a/app/main.py +++ b/app/main.py @@ -11,6 +11,7 @@ def main() -> None: context = builder.build() context.start() context.wait_for_termination() + context.stop() if __name__ == "__main__": From cd34edadc1bb5a72d2f11c5c60f7ca7cea2e5946 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Mon, 17 
Jun 2024 13:49:45 +0200 Subject: [PATCH 38/61] HHT-669: performance optimization of json parsing --- .../slice_glaciation-test-master01_80.jsonld | 1 + .../slice_glaciation-test-master01_80.jsonld | 2 + app/core/test_snapshot_base.py | 1 - app/k8s_transform/__fixture__/cluster.jsonld | 22 +- app/k8s_transform/__fixture__/cluster.turtle | 10 + .../__fixture__/deployment.jsonld | 1 + .../__fixture__/deployment.turtle | 1 + app/k8s_transform/__fixture__/pod1.jsonld | 1 + app/k8s_transform/__fixture__/pod1.turtle | 1 + app/k8s_transform/__fixture__/pod2.jsonld | 1 + app/k8s_transform/__fixture__/pod2.turtle | 1 + app/k8s_transform/__fixture__/pod3.jsonld | 1 + app/k8s_transform/__fixture__/pod3.turtle | 1 + .../__fixture__/replicaset.jsonld | 1 + .../__fixture__/replicaset.turtle | 1 + .../__fixture__/statefulset.jsonld | 1 + .../__fixture__/statefulset.turtle | 1 + app/k8s_transform/cluster_transformer.py | 6 +- app/k8s_transform/node_transformer.py | 32 +-- app/k8s_transform/pod_transformer.py | 31 ++- app/k8s_transform/transformer_base.py | 103 ++++++++- app/k8s_transform/upper_ontology_base.py | 2 + app/k8s_transform/workload_transformer.py | 201 ++++++++++++++---- 23 files changed, 334 insertions(+), 89 deletions(-) diff --git a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld index ea0f35a..93735fd 100644 --- a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld @@ -467,6 +467,7 @@ "@id": "cluster:crd-resource", "@type": "glc:AssignedTask", "glc:hasDescription": "CRD", + "glc:hasID": "cluster:crd-resource", "glc:makes": "cluster:coredns" }, { diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld index e07f58b..da238b3 100644 --- a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld @@ -564,6 +564,7 @@ "@id": "cluster:crd-resource", "@type": "glc:AssignedTask", "glc:hasDescription": "CRD", + "glc:hasID": "cluster:crd-resource", "glc:makes": "cluster:coredns" }, { @@ -642,6 +643,7 @@ "@id": "cluster:tenant1", "@type": "glc:AssignedTask", "glc:hasDescription": "Tenant", + "glc:hasID": "cluster:tenant1", "glc:makes": "cluster:glaciation-pool-0" }, { diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 44a98a7..5f0dc66 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -127,7 +127,6 @@ def assert_serialized_graph( ) -> None: file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/slice_{slice_id.node_ip}_{slice_id.port}.jsonld" node_jsonld = self.load_jsonld(file_path) - self.assertEqual(json.loads(actual_graph), node_jsonld) # type: ignore def get_test_jsonld_config(self) -> JsonLDConfiguration: diff --git a/app/k8s_transform/__fixture__/cluster.jsonld b/app/k8s_transform/__fixture__/cluster.jsonld index 79f0a35..1ea46ae 100644 --- a/app/k8s_transform/__fixture__/cluster.jsonld +++ b/app/k8s_transform/__fixture__/cluster.jsonld @@ -8,29 +8,39 @@ "@graph": [ { "@id": "cluster:glaciation-mast01", - "@type": "glc:WorkProducingResource" + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-mast01" }, { "@id": 
"cluster:glaciation-worker01", - "@type": "glc:WorkProducingResource" + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-worker01" }, { "@id": "cluster:glaciation-worker02", - "@type": "glc:WorkProducingResource" + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-worker02" }, { "@id": "cluster:glaciation-worker03", - "@type": "glc:WorkProducingResource" + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-worker03" }, { "@id": "cluster:glaciation-worker04", - "@type": "glc:WorkProducingResource" + "@type": "glc:WorkProducingResource", + "glc:hasDescription": "KubernetesWorkerNode", + "glc:hasID": "cluster:glaciation-worker04" }, { "@id": "cluster:kubernetes", "@type": "glc:WorkProducingResource", - "glc:hasID": "cluster:kubernetes", "glc:hasDescription": "KubernetesCluster", + "glc:hasID": "cluster:kubernetes", "glc:hasSubResource": { "@set": [ "cluster:glaciation-mast01", diff --git a/app/k8s_transform/__fixture__/cluster.turtle b/app/k8s_transform/__fixture__/cluster.turtle index 26706ef..cd8ab66 100644 --- a/app/k8s_transform/__fixture__/cluster.turtle +++ b/app/k8s_transform/__fixture__/cluster.turtle @@ -1,8 +1,18 @@ cluster:glaciation-mast01 rdf:type glc:WorkProducingResource . +cluster:glaciation-mast01 glc:hasDescription "KubernetesWorkerNode" . +cluster:glaciation-mast01 glc:hasID cluster:glaciation-mast01 . cluster:glaciation-worker01 rdf:type glc:WorkProducingResource . +cluster:glaciation-worker01 glc:hasDescription "KubernetesWorkerNode" . +cluster:glaciation-worker01 glc:hasID cluster:glaciation-worker01 . cluster:glaciation-worker02 rdf:type glc:WorkProducingResource . +cluster:glaciation-worker02 glc:hasDescription "KubernetesWorkerNode" . +cluster:glaciation-worker02 glc:hasID cluster:glaciation-worker02 . cluster:glaciation-worker03 rdf:type glc:WorkProducingResource . +cluster:glaciation-worker03 glc:hasDescription "KubernetesWorkerNode" . +cluster:glaciation-worker03 glc:hasID cluster:glaciation-worker03 . cluster:glaciation-worker04 rdf:type glc:WorkProducingResource . +cluster:glaciation-worker04 glc:hasDescription "KubernetesWorkerNode" . +cluster:glaciation-worker04 glc:hasID cluster:glaciation-worker04 . cluster:kubernetes rdf:type glc:WorkProducingResource . cluster:kubernetes glc:hasDescription "KubernetesCluster" . cluster:kubernetes glc:hasID cluster:kubernetes . diff --git a/app/k8s_transform/__fixture__/deployment.jsonld b/app/k8s_transform/__fixture__/deployment.jsonld index 9c493d6..93d2954 100644 --- a/app/k8s_transform/__fixture__/deployment.jsonld +++ b/app/k8s_transform/__fixture__/deployment.jsonld @@ -135,6 +135,7 @@ "@id": "cluster:crd-resource", "@type": "glc:AssignedTask", "glc:hasDescription": "CRD", + "glc:hasID": "cluster:crd-resource", "glc:makes": "cluster:coredns" }, { diff --git a/app/k8s_transform/__fixture__/deployment.turtle b/app/k8s_transform/__fixture__/deployment.turtle index 08f2ea1..018f6f7 100644 --- a/app/k8s_transform/__fixture__/deployment.turtle +++ b/app/k8s_transform/__fixture__/deployment.turtle @@ -64,6 +64,7 @@ cluster:coredns.Storage.Allocated glc:hasID cluster:coredns.Storage.Allocated . cluster:coredns.Storage.Allocated glc:measuredIn glc:Bytes . cluster:crd-resource rdf:type glc:AssignedTask . cluster:crd-resource glc:hasDescription "CRD" . +cluster:crd-resource glc:hasID cluster:crd-resource . 
cluster:crd-resource glc:makes cluster:coredns . glc:Bytes rdf:type glc:MeasurementUnit . glc:Bytes glc:hasID glc:Bytes . diff --git a/app/k8s_transform/__fixture__/pod1.jsonld b/app/k8s_transform/__fixture__/pod1.jsonld index 193d468..8a20284 100644 --- a/app/k8s_transform/__fixture__/pod1.jsonld +++ b/app/k8s_transform/__fixture__/pod1.jsonld @@ -33,6 +33,7 @@ "@id": "cluster:coredns-787d4945fb", "@type": "glc:AssignedTask", "glc:hasDescription": "ReplicaSet", + "glc:hasID": "cluster:coredns-787d4945fb", "glc:makes": "cluster:kube-system.coredns-787d4945fb-l85r5" }, { diff --git a/app/k8s_transform/__fixture__/pod1.turtle b/app/k8s_transform/__fixture__/pod1.turtle index 1b53b3b..00e0584 100644 --- a/app/k8s_transform/__fixture__/pod1.turtle +++ b/app/k8s_transform/__fixture__/pod1.turtle @@ -1,5 +1,6 @@ cluster:coredns-787d4945fb rdf:type glc:AssignedTask . cluster:coredns-787d4945fb glc:hasDescription "ReplicaSet" . +cluster:coredns-787d4945fb glc:hasID cluster:coredns-787d4945fb . cluster:coredns-787d4945fb glc:makes cluster:kube-system.coredns-787d4945fb-l85r5 . cluster:glaciation-test-master01 rdf:type glc:WorkProducingResource . cluster:glaciation-test-master01 glc:hasDescription "KubernetesWorkerNode" . diff --git a/app/k8s_transform/__fixture__/pod2.jsonld b/app/k8s_transform/__fixture__/pod2.jsonld index f6b35eb..9ccbd1f 100644 --- a/app/k8s_transform/__fixture__/pod2.jsonld +++ b/app/k8s_transform/__fixture__/pod2.jsonld @@ -17,6 +17,7 @@ "@id": "cluster:tenant1-pool-0", "@type": "glc:AssignedTask", "glc:hasDescription": "StatefulSet", + "glc:hasID": "cluster:tenant1-pool-0", "glc:makes": "cluster:tenant1.tenant1-pool-0-1" }, { diff --git a/app/k8s_transform/__fixture__/pod2.turtle b/app/k8s_transform/__fixture__/pod2.turtle index d004b0c..df34903 100644 --- a/app/k8s_transform/__fixture__/pod2.turtle +++ b/app/k8s_transform/__fixture__/pod2.turtle @@ -1,5 +1,6 @@ cluster:tenant1-pool-0 rdf:type glc:AssignedTask . cluster:tenant1-pool-0 glc:hasDescription "StatefulSet" . +cluster:tenant1-pool-0 glc:hasID cluster:tenant1-pool-0 . cluster:tenant1-pool-0 glc:makes cluster:tenant1.tenant1-pool-0-1 . cluster:tenant1.tenant1-pool-0-1 rdf:type glc:WorkProducingResource . cluster:tenant1.tenant1-pool-0-1 glc:hasDescription "Pod" . diff --git a/app/k8s_transform/__fixture__/pod3.jsonld b/app/k8s_transform/__fixture__/pod3.jsonld index d931509..30c81e4 100644 --- a/app/k8s_transform/__fixture__/pod3.jsonld +++ b/app/k8s_transform/__fixture__/pod3.jsonld @@ -55,6 +55,7 @@ "@id": "cluster:kube-flannel-ds", "@type": "glc:AssignedTask", "glc:hasDescription": "DaemonSet", + "glc:hasID": "cluster:kube-flannel-ds", "glc:makes": "cluster:kube-flannel.kube-flannel-ds-848v8" }, { diff --git a/app/k8s_transform/__fixture__/pod3.turtle b/app/k8s_transform/__fixture__/pod3.turtle index bdcb4d6..241a52c 100644 --- a/app/k8s_transform/__fixture__/pod3.turtle +++ b/app/k8s_transform/__fixture__/pod3.turtle @@ -3,6 +3,7 @@ cluster:glaciation-worker04 glc:hasDescription "KubernetesWorkerNode" . cluster:glaciation-worker04 glc:hasID cluster:glaciation-worker04 . cluster:kube-flannel-ds rdf:type glc:AssignedTask . cluster:kube-flannel-ds glc:hasDescription "DaemonSet" . +cluster:kube-flannel-ds glc:hasID cluster:kube-flannel-ds . cluster:kube-flannel-ds glc:makes cluster:kube-flannel.kube-flannel-ds-848v8 . cluster:kube-flannel.kube-flannel-ds-848v8 rdf:type glc:WorkProducingResource . cluster:kube-flannel.kube-flannel-ds-848v8 glc:hasDescription "Pod" . 
diff --git a/app/k8s_transform/__fixture__/replicaset.jsonld b/app/k8s_transform/__fixture__/replicaset.jsonld index cf8c7e0..3524651 100644 --- a/app/k8s_transform/__fixture__/replicaset.jsonld +++ b/app/k8s_transform/__fixture__/replicaset.jsonld @@ -25,6 +25,7 @@ "@id": "cluster:coredns", "@type": "glc:AssignedTask", "glc:hasDescription": "Deployment", + "glc:hasID": "cluster:coredns", "glc:hasSubTask": { "@id": "cluster:coredns-787d4945fb", "@type": "glc:AssignedTask", diff --git a/app/k8s_transform/__fixture__/replicaset.turtle b/app/k8s_transform/__fixture__/replicaset.turtle index 4d795f0..7a9e6cc 100644 --- a/app/k8s_transform/__fixture__/replicaset.turtle +++ b/app/k8s_transform/__fixture__/replicaset.turtle @@ -1,5 +1,6 @@ cluster:coredns rdf:type glc:AssignedTask . cluster:coredns glc:hasDescription "Deployment" . +cluster:coredns glc:hasID cluster:coredns . cluster:coredns glc:hasSubTask cluster:coredns-787d4945fb . cluster:coredns-787d4945fb rdf:type glc:AssignedTask . cluster:coredns-787d4945fb glc:hasDescription "ReplicaSet" . diff --git a/app/k8s_transform/__fixture__/statefulset.jsonld b/app/k8s_transform/__fixture__/statefulset.jsonld index 8b1772f..ffffa5b 100644 --- a/app/k8s_transform/__fixture__/statefulset.jsonld +++ b/app/k8s_transform/__fixture__/statefulset.jsonld @@ -25,6 +25,7 @@ "@id": "cluster:tenant1", "@type": "glc:AssignedTask", "glc:hasDescription": "Tenant", + "glc:hasID": "cluster:tenant1", "glc:makes": { "@id": "cluster:tenant1-pool-0", "@type": "glc:AssignedTask", diff --git a/app/k8s_transform/__fixture__/statefulset.turtle b/app/k8s_transform/__fixture__/statefulset.turtle index f3da0e5..dbb0ac7 100644 --- a/app/k8s_transform/__fixture__/statefulset.turtle +++ b/app/k8s_transform/__fixture__/statefulset.turtle @@ -1,5 +1,6 @@ cluster:tenant1 rdf:type glc:AssignedTask . cluster:tenant1 glc:hasDescription "Tenant" . +cluster:tenant1 glc:hasID cluster:tenant1 . cluster:tenant1 glc:makes cluster:tenant1-pool-0 . cluster:tenant1-pool-0 rdf:type glc:AssignedTask . cluster:tenant1-pool-0 glc:hasDescription "StatefulSet" . 
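The transformer diffs that follow replace jsonpath-ng string queries (e.g. "$.status.allocatable.cpu") with direct traversal of the already-parsed Kubernetes resource dict, which is where the JSON-parsing speedup in this patch comes from. Below is a minimal, self-contained sketch of that traversal pattern; get_opt_value and the sample node dict are illustrative names only, not the patch's actual helpers.

from typing import Any, Dict, List, Optional


def get_opt_value(source: Dict[str, Any], path: List[str]) -> Optional[Any]:
    # Walk the parsed JSON one key at a time; bail out on the first missing key.
    current: Any = source
    for key in path:
        if not isinstance(current, dict):
            return None
        current = current.get(key)
        if current is None:
            return None
    return current


# Equivalent of the old jsonpath query "$.status.allocatable.cpu":
node = {"status": {"allocatable": {"cpu": "4"}}}
assert get_opt_value(node, ["status", "allocatable", "cpu"]) == "4"
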
diff --git a/app/k8s_transform/cluster_transformer.py b/app/k8s_transform/cluster_transformer.py index 8c41d4b..6fd270e 100644 --- a/app/k8s_transform/cluster_transformer.py +++ b/app/k8s_transform/cluster_transformer.py @@ -43,9 +43,5 @@ def get_cluster_id(self, config: Dict[str, Any]) -> IRI: def write_node_reference(self, cluster_id: IRI, node: Dict[str, Any]) -> None: node_id = self.get_node_id(node) + self.add_work_producing_resource(node_id, "KubernetesWorkerNode") self.sink.add_relation(cluster_id, self.HAS_SUBRESOURCE, node_id) - self.sink.add_meta_property( - node_id, - Graph.RDF_TYPE_IRI, - IRI(self.GLACIATION_PREFIX, "WorkProducingResource"), - ) diff --git a/app/k8s_transform/node_transformer.py b/app/k8s_transform/node_transformer.py index 2a4b0e1..0e034f9 100644 --- a/app/k8s_transform/node_transformer.py +++ b/app/k8s_transform/node_transformer.py @@ -1,7 +1,4 @@ -from typing import Any, Dict, Optional - -from jsonpath_ng.ext import parse -from kubernetes.utils.quantity import parse_quantity +from typing import Any, Dict from app.k8s_transform.transformation_context import TransformationContext from app.k8s_transform.transformer_base import TransformerBase @@ -29,9 +26,11 @@ def transform(self, context: TransformationContext) -> None: def add_cpu_resource(self, node_id: IRI, timestamp: int) -> None: cpu_id = node_id.dot("CPU") - self.sink.add_relation(node_id, self.HAS_SUBRESOURCE, cpu_id) self.add_work_producing_resource(cpu_id, "CPU") - cpu_capacity_value = self.get_int_quantity_value("$.status.allocatable.cpu") + self.sink.add_relation(node_id, self.HAS_SUBRESOURCE, cpu_id) + cpu_capacity_value = self.get_opt_int_quantity_value( + ["status", "allocatable", "cpu"] + ) if cpu_capacity_value: cpu_capacity_id = cpu_id.dot("Capacity") self.add_measurement( @@ -50,7 +49,9 @@ def add_memory_resource(self, node_id: IRI, timestamp: int) -> None: self.add_work_producing_resource(ram_id, "RAM") self.sink.add_relation(node_id, self.HAS_SUBRESOURCE, ram_id) - ram_capacity_value = self.get_int_quantity_value("$.status.allocatable.memory") + ram_capacity_value = self.get_opt_int_quantity_value( + ["status", "allocatable", "memory"] + ) if ram_capacity_value: ram_capacity_id = ram_id.dot("Capacity") self.add_measurement( @@ -69,8 +70,8 @@ def add_storage_resource(self, node_id: IRI, timestamp: int) -> None: self.add_work_producing_resource(storage_id, "EphemeralStorage") self.sink.add_relation(node_id, self.HAS_SUBRESOURCE, storage_id) - storage_capacity_value = self.get_int_quantity_value( - "$.status.allocatable.ephemeral-storage" + storage_capacity_value = self.get_opt_int_quantity_value( + ["status", "allocatable", "ephemeral-storage"] ) if storage_capacity_value: storage_capacity_id = storage_id.dot("Capacity") @@ -98,8 +99,12 @@ def add_gpu_resource(self, node_id: IRI, _: int) -> None: self.sink.add_relation(node_id, self.HAS_SUBRESOURCE, storage_id) def add_energy_information(self, node_id: IRI, timestamp: int) -> None: - energy_index_value = self.get_int_quantity_value( - '$.metadata.annotations["glaciation-project.eu/metric/node-energy-index"]' + energy_index_value = self.get_opt_int_quantity_value( + [ + "metadata", + "annotations", + "glaciation-project.eu/metric/node-energy-index", + ] ) if energy_index_value: energy_index_id = node_id.dot("Energy.Index") @@ -134,8 +139,3 @@ def get_node_status(self) -> str: return "NotReady" else: return "Unknown" - - def get_int_quantity_value(self, query: str) -> Optional[int]: - for match in parse(query).find(self.source): - return 
int(parse_quantity(match.value)) - return None diff --git a/app/k8s_transform/pod_transformer.py b/app/k8s_transform/pod_transformer.py index 6752f2e..89f9637 100644 --- a/app/k8s_transform/pod_transformer.py +++ b/app/k8s_transform/pod_transformer.py @@ -1,9 +1,7 @@ -from typing import Any, Dict, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple import re -from jsonpath_ng.ext import parse - from app.k8s_transform.transformation_context import TransformationContext from app.k8s_transform.transformer_base import TransformerBase from app.k8s_transform.upper_ontology_base import UpperOntologyBase @@ -22,7 +20,7 @@ def transform(self, _: TransformationContext) -> None: self.add_work_producing_resource(pod_id, "Pod") self.add_references(pod_id, "Pod") scheduler_name = ( - self.get_str_value("$.spec.schedulerName") or "default-scheduler" + self.get_opt_str_value(["spec", "schedulerName"]) or "default-scheduler" ) self.add_str_property(pod_id, self.HAS_NAME, "$.metadata.name") self.add_scheduler_reference(pod_id, scheduler_name) @@ -36,35 +34,34 @@ def add_scheduler_reference(self, resource_id: IRI, scheduler_name: str) -> None self.sink.add_relation(scheduler_id, self.MANAGES, resource_id) def add_node_reference(self, pod_id: IRI) -> None: - node_name = self.get_str_value("$.spec.nodeName") + node_name = self.get_opt_str_value(["spec", "nodeName"]) if node_name: node_id = IRI(self.CLUSTER_PREFIX, node_name) - self.sink.add_relation(pod_id, self.CONSUMES, node_id) self.add_work_producing_resource(node_id, "KubernetesWorkerNode") + self.sink.add_relation(pod_id, self.CONSUMES, node_id) def add_pod_status(self, pod_id: IRI) -> None: status_id = pod_id.dot("Status") - start_time = self.get_str_value("$.status.startTime") - status = self.get_str_value("$.status.phase") + start_time = self.get_opt_str_value(["status", "startTime"]) + status = self.get_opt_str_value(["status", "phase"]) if status: # TODO if start_time is None self.add_status(status_id, status, start_time or "", None) def add_containers_resources(self, pod_id: IRI, scheduler_name: str) -> None: self.add_container_resources_by_query( - pod_id, scheduler_name, "$.status.containerStatuses" + pod_id, scheduler_name, ["status", "containerStatuses"] ) self.add_container_resources_by_query( - pod_id, scheduler_name, "$.status.initContainerStatuses" + pod_id, scheduler_name, ["status", "initContainerStatuses"] ) def add_container_resources_by_query( - self, pod_id: IRI, scheduler_name: str, jsonpath: str + self, pod_id: IRI, scheduler_name: str, jsonpath: List[str] ) -> None: - container_status_matches = parse(jsonpath).find(self.source) - if len(container_status_matches) > 0: - for container_match in container_status_matches[0].value: - self.add_container_resource(pod_id, container_match, scheduler_name) + container_status_matches = self.get_opt_list(jsonpath) or [] + for container_match in container_status_matches: + self.add_container_resource(pod_id, container_match, scheduler_name) def add_container_resource( self, pod_id: IRI, container: Dict[str, Any], scheduler_name: str @@ -72,6 +69,8 @@ def add_container_resource( k8s_container_id = container.get("containerID") or "undefined" k8s_container_name = container.get("name") or "undefined" container_id = pod_id.dot(k8s_container_name) + self.add_work_producing_resource(container_id, "Container") + self.add_scheduler_reference(container_id, scheduler_name) self.sink.add_property( container_id, self.HAS_CONTAINER_ID, @@ -82,8 +81,6 @@ def add_container_resource( 
self.HAS_CONTAINER_NAME, Literal(k8s_container_name, Literal.TYPE_STRING), ) - self.add_work_producing_resource(container_id, "Container") - self.add_scheduler_reference(container_id, scheduler_name) state = container.get("state") if state: self.add_container_status(container_id, state) diff --git a/app/k8s_transform/transformer_base.py b/app/k8s_transform/transformer_base.py index 6ccf2e0..c4291da 100644 --- a/app/k8s_transform/transformer_base.py +++ b/app/k8s_transform/transformer_base.py @@ -1,8 +1,9 @@ -from typing import Any, Dict, Optional, Set, Tuple +from typing import Any, Dict, List, Optional, Set, Tuple import re from jsonpath_ng.ext import parse +from kubernetes.utils.quantity import parse_quantity from app.k8s_transform.transformation_context import TransformationContext from app.k8s_transform.upper_ontology_base import UpperOntologyBase @@ -111,17 +112,17 @@ def normalize(self, value: str) -> str: return re.sub('["\n]', "", value) def get_id(self) -> IRI: - name = parse("$.metadata.name").find(self.source)[0].value + name = self.source["metadata"]["name"] resource_id = IRI(self.CLUSTER_PREFIX, name) return resource_id def get_pod_id(self) -> IRI: - name = parse("$.metadata.name").find(self.source)[0].value - namespace = parse("$.metadata.namespace").find(self.source)[0].value + name = self.source["metadata"]["name"] + namespace = self.source["metadata"]["namespace"] return IRI(self.CLUSTER_PREFIX, namespace).dot(name) def get_node_id(self, node_resource: Dict[str, Any]) -> IRI: - name = parse("$.metadata.name").find(node_resource)[0].value + name = node_resource["metadata"]["name"] resource_id = IRI(self.CLUSTER_PREFIX, name) return resource_id @@ -136,10 +137,10 @@ def get_int_value(self, query: str) -> Optional[int]: return None def add_references(self, node_id: IRI, target_kind: str) -> None: - references_match = parse("$.metadata.ownerReferences").find(self.source) - if len(references_match) == 0: + references_matches = self.source["metadata"].get("ownerReferences") or [] + if len(references_matches) == 0: return - for reference_match in references_match[0].value: + for reference_match in references_matches: reference, src_kind = self.get_reference_id(reference_match) src_type = ( self.RESOURCE_TYPE_MAP.get(src_kind) or UpperOntologyBase.ASSIGNED_TASK @@ -150,6 +151,7 @@ def add_references(self, node_id: IRI, target_kind: str) -> None: ) self.sink.add_relation(reference, relation, node_id) self.sink.add_meta_property(reference, Graph.RDF_TYPE_IRI, src_type) + self.sink.add_relation(reference, UpperOntologyBase.HAS_ID, reference) self.sink.add_property( reference, UpperOntologyBase.HAS_DESCRIPTION, @@ -163,3 +165,88 @@ def add_str_property(self, subject: IRI, property: IRI, query: str) -> None: property, Literal(match.value, Literal.TYPE_STRING), ) + + def get_opt_str_value(self, query_path: List[str]) -> Optional[str]: + if len(query_path) == 0: + return None + current: Dict[str, Any] = self.source + result: Optional[str] = None + for subpath in query_path: + next = current.get(subpath) + if not next: + return None + current = next + if isinstance(next, str): + result = str(current) + return result + + def get_opt_struct(self, query_path: List[str]) -> Optional[Dict[str, Any]]: + if len(query_path) == 0: + return None + current: Dict[str, Any] = self.source + for subpath in query_path: + next = current.get(subpath) + if not next: + return None + current = next + return current + + def get_opt_list(self, query_path: List[str]) -> Optional[List[Dict[str, Any]]]: + if 
len(query_path) == 0: + return None + current: Dict[str, Any] = self.source + result: List[Dict[str, Any]] = [] + for subpath in query_path: + next = current.get(subpath) + if not next: + return None + current = next + result = next + return result + + def get_opt_int_quantity_value(self, query_path: List[str]) -> Optional[int]: + if len(query_path) == 0: + return None + current: Dict[str, Any] = self.source + result: Any = "0" + for subpath in query_path: + next = current.get(subpath) + if not next: + return None + current = next + result = next + return int(parse_quantity(result)) + + def get_str_list(self, query_path: List[str]) -> List[str]: + if len(query_path) == 0: + return [] + results: List[str] = [] + self.fetch_level([self.source], query_path, 0, results) + return results + + def fetch_level( + self, + current: List[Dict[str, Any]], + query_path: List[str], + query_path_i: int, + result: List[str], + ) -> None: + if query_path_i == len(query_path) - 1: + path = query_path[query_path_i] + for current_level in current: + query_result = current_level.get(path) + if query_result: + result.append(query_result) + else: + path = query_path[query_path_i] + for current_level in current: + next_level = current_level.get(path) + if next_level: + if isinstance(next_level, list): + self.fetch_level( + next_level, query_path, query_path_i + 1, result + ) + else: + self.fetch_level( + [next_level], query_path, query_path_i + 1, result + ) diff --git a/app/k8s_transform/upper_ontology_base.py b/app/k8s_transform/upper_ontology_base.py index 54e352f..4f43364 100644 --- a/app/k8s_transform/upper_ontology_base.py +++ b/app/k8s_transform/upper_ontology_base.py @@ -262,6 +262,8 @@ def add_subresource(self, identifier: IRI, subresource: IRI) -> None: def add_common_info( self, entity_id: IRI, entity_type: IRI, description: Optional[str] ) -> None: + if self.sink.has_node(entity_id): + return self.sink.add_meta_property(entity_id, Graph.RDF_TYPE_IRI, entity_type) self.sink.add_relation(entity_id, self.HAS_ID, entity_id) if description: diff --git a/app/k8s_transform/workload_transformer.py b/app/k8s_transform/workload_transformer.py index 526446e..6aaf8e4 100644 --- a/app/k8s_transform/workload_transformer.py +++ b/app/k8s_transform/workload_transformer.py @@ -1,6 +1,5 @@ from typing import Any, Dict, List, Optional -from jsonpath_ng.ext import parse from kubernetes.utils.quantity import parse_quantity from app.k8s_transform.transformation_context import TransformationContext @@ -17,7 +16,7 @@ def __init__(self, source: Dict[str, Any], sink: Graph): def transform(self, _: TransformationContext) -> None: workload_id = self.get_id() - kind = self.get_str_value("$.kind") + kind = self.source["kind"] self.add_assigned_task(workload_id, kind) if kind: self.add_references(workload_id, kind) @@ -31,8 +30,24 @@ def add_soft_constraints(self, workload_id: IRI) -> None: "CPU.Allocated", True, [ - "$.spec.template.spec.containers[*].resources.requests.cpu", - "$.spec.template.spec.initContainers[*].resources.requests.cpu", + [ + "spec", + "template", + "spec", + "containers", + "resources", + "requests", + "cpu", + ], + [ + "spec", + "template", + "spec", + "initContainers", + "resources", + "requests", + "cpu", + ], ], self.UNIT_CPU_CORE_ID, self.ASPECT_PERFORMANCE_ID, @@ -43,8 +58,24 @@ def add_soft_constraints(self, workload_id: IRI) -> None: "RAM.Allocated", True, [ - "$.spec.template.spec.containers[*].resources.requests.memory", - "$.spec.template.spec.initContainers[*].resources.requests.memory", + [ + 
"spec", + "template", + "spec", + "containers", + "resources", + "requests", + "memory", + ], + [ + "spec", + "template", + "spec", + "initContainers", + "resources", + "requests", + "memory", + ], ], self.UNIT_BYTES_ID, self.ASPECT_PERFORMANCE_ID, @@ -55,8 +86,24 @@ def add_soft_constraints(self, workload_id: IRI) -> None: "Storage.Allocated", True, [ - "$.spec.template.spec.containers[*].resources.requests.ephemeral-storage", # noqa: E501 - "$.spec.template.spec.initContainers[*].resources.requests.ephemeral-storage", # noqa: E501 + [ + "spec", + "template", + "spec", + "containers", + "resources", + "requests", + "ephemeral-storage", + ], # noqa: E501 + [ + "spec", + "template", + "spec", + "initContainers", + "resources", + "requests", + "ephemeral-storage", + ], # noqa: E501 ], self.UNIT_BYTES_ID, self.ASPECT_PERFORMANCE_ID, @@ -67,7 +114,13 @@ def add_soft_constraints(self, workload_id: IRI) -> None: "GPU.Allocated", True, [ - "$.spec.template.metadata.annotations['glaciation-project.eu/resource/requests/gpu']" # noqa: E501 + [ + "spec", + "template", + "metadata", + "annotations", + "glaciation-project.eu/resource/requests/gpu", + ] # noqa: E501 ], self.UNIT_CPU_CORE_ID, self.ASPECT_PERFORMANCE_ID, @@ -78,7 +131,13 @@ def add_soft_constraints(self, workload_id: IRI) -> None: "Network.Allocated", True, [ - "$.spec.template.metadata.annotations['glaciation-project.eu/resource/requests/network']" # noqa: E501 + [ + "spec", + "template", + "metadata", + "annotations", + "glaciation-project.eu/resource/requests/network", + ] # noqa: E501 ], self.UNIT_BYTES_ID, self.ASPECT_PERFORMANCE_ID, @@ -89,7 +148,13 @@ def add_soft_constraints(self, workload_id: IRI) -> None: "Energy.Allocated", True, [ - "$.spec.template.metadata.annotations['glaciation-project.eu/resource/requests/energy']" # noqa: E501 + [ + "spec", + "template", + "metadata", + "annotations", + "glaciation-project.eu/resource/requests/energy", + ] # noqa: E501 ], self.UNIT_MILLIWATT_ID, self.ASPECT_POWER_ID, @@ -101,8 +166,24 @@ def add_hard_constraints(self, workload_id: IRI) -> None: "CPU.Capacity", False, [ - "$.spec.template.spec.containers[*].resources.limits.cpu", - "$.spec.template.spec.initContainers[*].resources.limits.cpu", + [ + "spec", + "template", + "spec", + "containers", + "resources", + "limits", + "cpu", + ], + [ + "spec", + "template", + "spec", + "initContainers", + "resources", + "limits", + "cpu", + ], ], self.UNIT_CPU_CORE_ID, self.ASPECT_PERFORMANCE_ID, @@ -113,8 +194,24 @@ def add_hard_constraints(self, workload_id: IRI) -> None: "RAM.Capacity", False, [ - "$.spec.template.spec.containers[*].resources.limits.memory", - "$.spec.template.spec.initContainers[*].resources.limits.memory", + [ + "spec", + "template", + "spec", + "containers", + "resources", + "limits", + "memory", + ], + [ + "spec", + "template", + "spec", + "initContainers", + "resources", + "limits", + "memory", + ], ], self.UNIT_BYTES_ID, self.ASPECT_PERFORMANCE_ID, @@ -125,8 +222,24 @@ def add_hard_constraints(self, workload_id: IRI) -> None: "Storage.Capacity", False, [ - "$.spec.template.spec.containers[*].resources.limits.ephemeral-storage", # noqa: E501 - "$.spec.template.spec.initContainers[*].resources.limits.ephemeral-storage", # noqa: E501 + [ + "spec", + "template", + "spec", + "containers", + "resources", + "limits", + "ephemeral-storage", + ], # noqa: E501 + [ + "spec", + "template", + "spec", + "initContainers", + "resources", + "limits", + "ephemeral-storage", + ], # noqa: E501 ], self.UNIT_BYTES_ID, 
self.ASPECT_PERFORMANCE_ID, @@ -137,7 +250,13 @@ def add_hard_constraints(self, workload_id: IRI) -> None: "GPU.Capacity", False, [ - "$.spec.template.metadata.annotations['glaciation-project.eu/resource/limits/gpu']" # noqa: E501 + [ + "spec", + "template", + "metadata", + "annotations", + "glaciation-project.eu/resource/limits/gpu", + ] # noqa: E501 ], self.UNIT_CPU_CORE_ID, self.ASPECT_PERFORMANCE_ID, @@ -148,7 +267,13 @@ def add_hard_constraints(self, workload_id: IRI) -> None: "Network.Capacity", False, [ - "$.spec.template.metadata.annotations['glaciation-project.eu/resource/limits/network']" # noqa: E501 + [ + "spec", + "template", + "metadata", + "annotations", + "glaciation-project.eu/resource/limits/network", + ] # noqa: E501 ], self.UNIT_BYTES_ID, self.ASPECT_PERFORMANCE_ID, @@ -159,7 +284,13 @@ def add_hard_constraints(self, workload_id: IRI) -> None: "Energy.Capacity", False, [ - "$.spec.template.metadata.annotations['glaciation-project.eu/resource/limits/energy']" # noqa: E501 + [ + "spec", + "template", + "metadata", + "annotations", + "glaciation-project.eu/resource/limits/energy", + ] # noqa: E501 ], self.UNIT_MILLIWATT_ID, self.ASPECT_POWER_ID, @@ -170,7 +301,7 @@ def add_resource_constraint_by_jpath( workload_id: IRI, constraint_id_name: str, is_soft: bool, - jqpath: List[str], + jqpath: List[List[str]], unit: IRI, aspect: IRI, ) -> None: @@ -187,27 +318,25 @@ def add_resource_constraint_by_jpath( ) self.sink.add_relation(workload_id, self.HAS_CONSTRAINT, constraint_id) - def get_int_quantity_value(self, query: str) -> Optional[int]: - for match in parse(query).find(self.source): - return int(parse_quantity(match.value)) - return None + def add_workload_scheduler(self, workload_id: IRI) -> None: + scheduler_name = self.get_opt_str_value( + ["spec", "template", "spec", "schedulerName"] + ) + scheduler_id = IRI( + self.GLACIATION_PREFIX, scheduler_name or "default-scheduler" + ) + self.add_scheduler(scheduler_id, None) + self.sink.add_relation(scheduler_id, self.ASSIGNS, workload_id) - def get_int_quantity_value_list(self, queries: List[str]) -> Optional[float]: + def get_int_quantity_value_list(self, queries: List[List[str]]) -> Optional[float]: result = [] for query in queries: - for match in parse(query).find(self.source): - quantity = float(parse_quantity(match.value)) + values = self.get_str_list(query) + for quantity_str in values: + quantity = float(parse_quantity(quantity_str)) if quantity > 0: result.append(quantity) if len(result) > 0: return sum(result) else: return None - - def add_workload_scheduler(self, workload_id: IRI) -> None: - scheduler_name = self.get_str_value("$.spec.template.spec.schedulerName") - scheduler_id = IRI( - self.GLACIATION_PREFIX, scheduler_name or "default-scheduler" - ) - self.add_scheduler(scheduler_id, None) - self.sink.add_relation(scheduler_id, self.ASSIGNS, workload_id) From 57868f7cbf9c07461c8a6867ccd109b85b2c82a0 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 18 Jun 2024 10:37:25 +0200 Subject: [PATCH 39/61] HHT-669: single slice url --- .../metadata_service_client_impl.py | 12 +++++++-- .../metadata_service_settings.py | 1 + app/kgexporter_context_builder.py | 27 +++++++++++++++++-- 3 files changed, 36 insertions(+), 4 deletions(-) diff --git a/app/clients/metadata_service/metadata_service_client_impl.py b/app/clients/metadata_service/metadata_service_client_impl.py index 8693696..17c375a 100644 --- a/app/clients/metadata_service/metadata_service_client_impl.py +++ 
b/app/clients/metadata_service/metadata_service_client_impl.py @@ -23,7 +23,8 @@ def __init__(self, settings: MetadataServiceSettings): self.settings = settings async def query(self, host_and_port: str, sparql: str) -> List[Triple]: - url = f"http://{host_and_port}/api/v0/graph/search" + base_url = self.get_base_url(host_and_port) + url = f"{base_url}/api/v0/graph/search" async with httpx.AsyncClient() as client: try: response = await client.post( @@ -38,7 +39,8 @@ async def query(self, host_and_port: str, sparql: str) -> List[Triple]: raise ClientError(e.args[0]) from e async def insert(self, host_and_port: str, graph: str) -> None: - url = f"http://{host_and_port}/api/v0/graph" + base_url = self.get_base_url(host_and_port) + url = f"{base_url}/api/v0/graph" async with httpx.AsyncClient() as client: try: response = await client.patch( @@ -49,3 +51,9 @@ async def insert(self, host_and_port: str, graph: str) -> None: response.raise_for_status() except HTTPError as e: raise ClientError(e.args[0]) from e + + def get_base_url(self, host_and_port: str) -> str: + if self.settings.use_single_url: + return f"http://{self.settings.metadata_service_url}" + else: + return f"http://{host_and_port}" diff --git a/app/clients/metadata_service/metadata_service_settings.py b/app/clients/metadata_service/metadata_service_settings.py index 184bd8b..bade1e7 100644 --- a/app/clients/metadata_service/metadata_service_settings.py +++ b/app/clients/metadata_service/metadata_service_settings.py @@ -4,3 +4,4 @@ class MetadataServiceSettings(BaseSettings): metadata_service_url: str = "metadata-service" metadata_service_push_period_sec: int = 60 + use_single_url: bool = True diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index 1de1004..b54b766 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple import argparse from argparse import Namespace @@ -9,6 +9,9 @@ from app.clients.metadata_service.metadata_service_client_impl import ( MetadataServiceClientImpl, ) +from app.k8s_transform.upper_ontology_base import UpperOntologyBase +from app.kg.id_base import IdBase +from app.kg.iri import IRI from app.kgexporter_context import KGExporterContext from app.kgexporter_settings import KGExporterSettings from app.pydantic_yaml import from_yaml @@ -59,7 +62,7 @@ def build(self) -> KGExporterContext: metadata_client = MetadataServiceClientImpl(self.settings.metadata) k8s_client = K8SClientImpl(self.settings.k8s) influxdb_client = InfluxDBClientImpl(self.settings.influxdb) - jsonld_config = JsonLDConfiguration(contexts=dict(), aggregates=set()) + jsonld_config = self.get_jsonld_config() context = KGExporterContext( clock, @@ -70,3 +73,23 @@ def build(self) -> KGExporterContext: self.settings, ) return context + + def get_jsonld_config(self) -> JsonLDConfiguration: + contexts: Dict[IdBase, Dict[str, Any]] = { + JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + } + } + return JsonLDConfiguration( + contexts, + { + IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, 
"MeasuringResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), + }, + ) From 36dcb4e49821818eba796a2cb2b85397e5af964e Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 18 Jun 2024 13:04:38 +0200 Subject: [PATCH 40/61] HHT-669: permissions to get cluster resources --- charts/app/templates/clusterrole.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/charts/app/templates/clusterrole.yaml b/charts/app/templates/clusterrole.yaml index f59f2b2..9af23f0 100644 --- a/charts/app/templates/clusterrole.yaml +++ b/charts/app/templates/clusterrole.yaml @@ -4,5 +4,8 @@ metadata: name: {{ include "app.fullname" . }}-role rules: - apiGroups: ["apps", "batch"] - resources: ["deployments", "statefulsets", "jobs"] - verbs: ["list", "watch"] + resources: ["deployments", "jobs", "statefulsets", "daemonsets", "replicasets"] + verbs: ["list", "watch", "get"] +- apiGroups: [""] + resources: ["configmaps", "deployments", "statefulsets", "jobs", "nodes", "daemonsets", "replicasets", "pods"] + verbs: ["get", "list"] From 0dd2eb40b6f41c3b3ae42c1c59a1309341665968 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 18 Jun 2024 13:24:06 +0200 Subject: [PATCH 41/61] HHT-669: do not update slice if there are no changes --- app/core/dkg_slice_store.py | 2 +- app/core/kg_updater.py | 17 +++++++++++------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/app/core/dkg_slice_store.py b/app/core/dkg_slice_store.py index 39fe863..347bcb4 100644 --- a/app/core/dkg_slice_store.py +++ b/app/core/dkg_slice_store.py @@ -11,7 +11,7 @@ def __init__(self): def update(self, slice: DKGSlice) -> bool: existing = self.slices.get(slice.slice_id) - if existing != slice: + if not existing or existing.graph != slice.graph: self.slices[slice.slice_id] = slice return True else: diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index 62c8a7c..8b0a5b2 100644 --- a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -3,6 +3,7 @@ from loguru import logger from app.core.async_queue import AsyncQueue +from app.core.dkg_slice_store import DKGSliceStore from app.core.kg_repository import KGRepository from app.core.types import DKGSlice @@ -11,6 +12,7 @@ class KGUpdater: queue: AsyncQueue[DKGSlice] kg_repository: KGRepository terminated: asyncio.Event + slices: DKGSliceStore def __init__( self, @@ -21,6 +23,7 @@ def __init__( self.queue = queue self.kg_repository = kg_repository self.terminated = terminated + self.slices = DKGSliceStore() async def run(self) -> None: logger.info("Updater started.") @@ -34,11 +37,13 @@ async def run(self) -> None: async def run_cycle(self) -> None: slice = self.queue.get_nowait() if slice: - logger.info( - "updating slice {slice}, with timestamp {timestamp}", - slice=slice.slice_id, - timestamp=slice.timestamp, - ) - await self.kg_repository.update(slice.slice_id, slice.graph) + to_update = self.slices.update(slice) + if to_update: + logger.info( + "updating slice {slice}, with timestamp {timestamp}", + slice=slice.slice_id, + timestamp=slice.timestamp, + ) + await self.kg_repository.update(slice.slice_id, slice.graph) else: await asyncio.sleep(0.5) From 230aeed89fc752ca7c75c2ddbbfaacfed1f112b5 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 18 Jun 2024 13:36:52 +0200 Subject: [PATCH 42/61] HHT-669: updating lock file --- poetry.lock | 105 ++++++++++++++++++++++++++++------------------------ 1 file changed, 57 insertions(+), 48 deletions(-) diff --git a/poetry.lock b/poetry.lock index f1fa9e2..62b5a89 100644 --- a/poetry.lock +++ 
b/poetry.lock @@ -522,18 +522,18 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" +pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] @@ -1270,47 +1270,56 @@ files = [ [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.0" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = 
"numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, + {file = 
"numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, + {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, + {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, + {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, + {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, + {file = 
"numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, + {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, + {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, + {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, + {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, ] [[package]] @@ -1629,13 +1638,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -2198,13 +2207,13 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.19" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, + {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, ] [package.extras] From f5c3379691285e1e1422c276bc60cf433cad0fff Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 18 Jun 2024 13:38:40 +0200 Subject: [PATCH 43/61] HHT-669: cleanup --- app/kgexporter_context.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 8a33dee..e8cf58d 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -10,7 +10,6 @@ from app.clients.k8s.k8s_client import K8SClient from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.async_queue import AsyncQueue -from app.core.dkg_slice_store import DKGSliceStore from app.core.kg_builder import KGBuilder from app.core.kg_repository import KGRepository from app.core.kg_updater import KGUpdater @@ -26,7 +25,6 @@ class KGExporterContext: updater: KGUpdater queue: AsyncQueue[DKGSlice] runner: asyncio.Runner - dkg_slice_store: DKGSliceStore terminated: asyncio.Event prometheus_server: WSGIServer tasks: List[asyncio.Task[Any]] @@ -46,7 +44,6 @@ def __init__( influxdb_repository = MetricRepository(influxdb_client) self.terminated = asyncio.Event() self.queue = AsyncQueue[DKGSlice]() - self.dkg_slice_store = DKGSliceStore() self.builder = KGBuilder( self.terminated, clock, From 2c058687ec9af28fdca8efb401b994d45d4cb414 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 20 Jun 2024 12:36:18 +0200 Subject: [PATCH 44/61] HHT-669: temporary disable aiocsv --- poetry.lock | 51 +++++++++----------------------------------------- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 43 deletions(-) diff --git a/poetry.lock b/poetry.lock index 62b5a89..4d667ca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,38 +1,5 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
-[[package]] -name = "aiocsv" -version = "1.3.2" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiocsv-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1996ac960c196aecc7d22e701c273a2676d13bf25575af78d4e515fc724ef20"}, - {file = "aiocsv-1.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd688dbc1723f2b3a433e42041ceb9c9a8fe70f547d35b2da4ea31e4c78efc5"}, - {file = "aiocsv-1.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2f921828e386bb6945ed7d268e1524349ea506974ae35b9772542714f0ef3efd"}, - {file = "aiocsv-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:198c905ec29897c347bf9b18eb410af13d7ac94a03d4b673e64eaa5f4557c913"}, - {file = "aiocsv-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c25ad8afbf79d28ec3320e608c7f38d3eff93e96ebbbd2430ae8fa0f6e7631b"}, - {file = "aiocsv-1.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4004569bff39cb839a335b8f673a6496fd5b0b6e074c7adb7aee4a0c8379ea22"}, - {file = "aiocsv-1.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e9c98f8d760add0b52274523baa4b81dde4a3c96f79222d3d4d6965bac9cdcbd"}, - {file = "aiocsv-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:9edb342b0d7dba94d8976f46ba5814b8d8704d67a45e1b8a6579ab0ba04309e7"}, - {file = "aiocsv-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:db943a463cb6828ba81bd7c083c6dd4c96edac4880b8638af81798d694405e26"}, - {file = "aiocsv-1.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10780033a1ed3da825f2256449d177b7106b3c5a2d64bd683eab37f1fdee1e36"}, - {file = "aiocsv-1.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8c7aee34ceff4eaa654f01acbdba648297f5f9532dc7a23fac62defec28e0fe5"}, - {file = "aiocsv-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:59b0ea2d9e73539d4c1276467c4457acafa995717ea1b5340f3737f2cde2f71a"}, - {file = "aiocsv-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1c7d1700b8de16f25b24bfcebfc2b0817b29ce413f6961f08d5aa95bf00a6862"}, - {file = "aiocsv-1.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aa9629c8a1c07e9d02c7d80d84f021f7994fe30d021f13ac963e251b54724ef"}, - {file = "aiocsv-1.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d125286f971e0038e8872f31b6f1cd6184b9c508445e6633f075d8b543b444bc"}, - {file = "aiocsv-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:b7220b4a6545abbbb6ab8fe7d4880aa8334f156b872b83641b898df2da9a6484"}, - {file = "aiocsv-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfd2ef214b6d7944991f62ac593ad45bdaf0ed9f5741c8441ee7de148e512fe7"}, - {file = "aiocsv-1.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c3e5a817b3489283cc1fd80f8ba56431d552dc9ea4e539c0069d8d56bf0fba7"}, - {file = "aiocsv-1.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2ef14fa0839394ecc52274ea538b12b7b2e756eb0f514902a8fb391612161079"}, - {file = "aiocsv-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:17341fa3b90414adda6cd8c79efc3c1a3f58a4dc72c2053c4532e82b61ef9f5e"}, - {file = "aiocsv-1.3.2.tar.gz", hash = "sha256:806d93465c7808d58d3ff0d2bba270fb4d04b934be6a1e95d0834c50a510910e"}, -] - -[package.dependencies] -typing_extensions = "*" - [[package]] name = "aiohttp" version = "3.9.3" @@ -506,18 +473,18 @@ files = [ [[package]] name = "filelock" 
-version = "3.15.1" +version = "3.15.3" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"}, - {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"}, + {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, + {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -2109,18 +2076,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "70.0.0" +version = "70.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, + {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2475,4 +2442,4 @@ multidict = ">=4.0" [metadata] lock-version = 
"2.0" python-versions = "^3.11.4" -content-hash = "7fbd3ca1216cdaac0216b8baf06155b29eb660a4dc51de99025e00daf7b31aa9" +content-hash = "0139321fec59a7a2799037bf63b85faf591ddf8d502e2049a76419a04b2a5143" diff --git a/pyproject.toml b/pyproject.toml index a8c02d6..8e0b09c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ httpx = "^0.27.0" pydantic-settings = "^2.2.1" pytest-vcr = "^1.0.2" influxdb-client = "^1.43.0" -aiocsv = "1.3.2" +# aiocsv = "1.3.2" aiohttp = "3.9.3" loguru = "0.7.2" prometheus-client = "0.20.0" From 1c615ef0ef5581163e035e13e955288b8dc06f36 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Sun, 23 Jun 2024 10:37:25 +0200 Subject: [PATCH 45/61] HHT-669: extracting MetricStore client --- app/clients/influxdb/influxdb_client_impl.py | 4 ++-- .../{influxdb_client.py => metricstore_client.py} | 2 +- app/clients/influxdb/mock_infuxdbclient.py | 4 ++-- app/core/metric_repository.py | 6 +++--- app/kgexporter_context.py | 8 ++++++-- app/main.py | 6 ++++-- 6 files changed, 18 insertions(+), 12 deletions(-) rename app/clients/influxdb/{influxdb_client.py => metricstore_client.py} (91%) diff --git a/app/clients/influxdb/influxdb_client_impl.py b/app/clients/influxdb/influxdb_client_impl.py index d9a026f..fbd3428 100644 --- a/app/clients/influxdb/influxdb_client_impl.py +++ b/app/clients/influxdb/influxdb_client_impl.py @@ -3,13 +3,13 @@ from influxdb_client.client.flux_table import TableList from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync -from app.clients.influxdb.influxdb_client import InfluxDBClient from app.clients.influxdb.influxdb_settings import InfluxDBSettings +from app.clients.influxdb.metricstore_client import MetricStoreClient from app.clients.influxdb.query_result_parser import QueryResultParser from app.core.metric_value import MetricValue -class InfluxDBClientImpl(InfluxDBClient): +class InfluxDBClientImpl(MetricStoreClient): settings: InfluxDBSettings async_client: InfluxDBClientAsync diff --git a/app/clients/influxdb/influxdb_client.py b/app/clients/influxdb/metricstore_client.py similarity index 91% rename from app/clients/influxdb/influxdb_client.py rename to app/clients/influxdb/metricstore_client.py index bf11b09..d72f95d 100644 --- a/app/clients/influxdb/influxdb_client.py +++ b/app/clients/influxdb/metricstore_client.py @@ -4,7 +4,7 @@ from app.core.metric_value import MetricValue -class InfluxDBClient: +class MetricStoreClient: async def query( self, query: str, result_parser: QueryResultParser ) -> List[MetricValue]: diff --git a/app/clients/influxdb/mock_infuxdbclient.py b/app/clients/influxdb/mock_infuxdbclient.py index 26e5caa..fa26400 100644 --- a/app/clients/influxdb/mock_infuxdbclient.py +++ b/app/clients/influxdb/mock_infuxdbclient.py @@ -1,11 +1,11 @@ from typing import Dict, List -from app.clients.influxdb.influxdb_client import InfluxDBClient +from app.clients.influxdb.metricstore_client import MetricStoreClient from app.clients.influxdb.query_result_parser import QueryResultParser from app.core.metric_value import MetricValue -class MockInfluxDBClient(InfluxDBClient): +class MockInfluxDBClient(MetricStoreClient): results: Dict[str, List[MetricValue]] def __init__(self): diff --git a/app/core/metric_repository.py b/app/core/metric_repository.py index fd08919..fc81ba8 100644 --- a/app/core/metric_repository.py +++ b/app/core/metric_repository.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from enum import StrEnum -from app.clients.influxdb.influxdb_client import InfluxDBClient +from 
app.clients.influxdb.metricstore_client import MetricStoreClient from app.clients.influxdb.query_result_parser import QueryResultParser from app.clients.influxdb.simple_result_parser import SimpleResultParser from app.core.metric_value import MetricValue @@ -32,9 +32,9 @@ class MetricQuery: class MetricRepository: - client: InfluxDBClient + client: MetricStoreClient - def __init__(self, client: InfluxDBClient): + def __init__(self, client: MetricStoreClient): self.client = client async def query_many( diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index e8cf58d..f10fce6 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -6,7 +6,7 @@ from loguru import logger from prometheus_client import start_http_server -from app.clients.influxdb.influxdb_client import InfluxDBClient +from app.clients.influxdb.metricstore_client import MetricStoreClient from app.clients.k8s.k8s_client import K8SClient from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.async_queue import AsyncQueue @@ -35,7 +35,7 @@ def __init__( clock: Clock, metadata_client: MetadataServiceClient, k8s_client: K8SClient, - influxdb_client: InfluxDBClient, + influxdb_client: MetricStoreClient, jsonld_config: JsonLDConfiguration, settings: KGExporterSettings, ): @@ -76,3 +76,7 @@ def stop(self) -> None: def wait_for_termination(self) -> None: self.runner.run(self.terminated.wait()) logger.info("Application terminated.") + + def exit_gracefully(self) -> None: + self.stop() + self.wait_for_termination() diff --git a/app/main.py b/app/main.py index 99818d3..49c9006 100644 --- a/app/main.py +++ b/app/main.py @@ -5,13 +5,15 @@ def main() -> None: - signal.signal(signal.SIGINT, signal.SIG_DFL) builder = KGExporterContextBuilder.from_args(sys.argv[1:]) if builder: context = builder.build() + + signal.signal(signal.SIGINT, context.exit_gracefully) # type: ignore + signal.signal(signal.SIGTERM, context.exit_gracefully) # type: ignore + context.start() context.wait_for_termination() - context.stop() if __name__ == "__main__": From d65f7ed99d0efd1cff7ed5193affd841a196e06a Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Sun, 23 Jun 2024 10:43:10 +0200 Subject: [PATCH 46/61] HHT-669: prometheus metric client --- README.md | 2 +- app/clients/influxdb/influxdb_client_impl.py | 2 +- app/clients/influxdb/query_result_parser.py | 4 +- app/clients/influxdb/simple_result_parser.py | 18 +-- .../influxdb/test_simple_result_parser.py | 14 +- app/clients/prometheus/__init__.py | 0 .../prometheus/mock_prometheus_client.py | 18 +++ app/clients/prometheus/prometheus_client.py | 27 ++++ .../prometheus/prometheus_client_settings.py | 5 + .../prometheus/prometheus_result_parser.py | 24 ++++ .../test_prometheus_result_parser.py | 66 +++++++++ app/core/kg_builder.py | 13 ++ app/core/kg_updater.py | 17 +++ app/kgexporter_context.py | 4 +- app/kgexporter_context_builder.py | 7 +- app/kgexporter_settings.py | 2 + app/test_kgexporter_context.py | 2 + app/test_pydantic_yaml.py | 2 + etc/config.yaml | 10 +- poetry.lock | 127 +++++++++--------- pyproject.toml | 2 +- 21 files changed, 276 insertions(+), 90 deletions(-) create mode 100644 app/clients/prometheus/__init__.py create mode 100644 app/clients/prometheus/mock_prometheus_client.py create mode 100644 app/clients/prometheus/prometheus_client.py create mode 100644 app/clients/prometheus/prometheus_client_settings.py create mode 100644 app/clients/prometheus/prometheus_result_parser.py create mode 100644 
app/clients/prometheus/test_prometheus_result_parser.py diff --git a/README.md b/README.md index efbcff5..4c7ccf3 100644 --- a/README.md +++ b/README.md @@ -91,7 +91,7 @@ poetry run pytest 5. Launch the service: ```bash -poetry run python app/kg_exporter.py +poetry run python -m app.main --config ./etc/config.yaml ``` ## Integration tests diff --git a/app/clients/influxdb/influxdb_client_impl.py b/app/clients/influxdb/influxdb_client_impl.py index fbd3428..b5a7525 100644 --- a/app/clients/influxdb/influxdb_client_impl.py +++ b/app/clients/influxdb/influxdb_client_impl.py @@ -36,5 +36,5 @@ def parse_response( query_results = [] for table in flux_result: for record in table.records: - query_results.append(result_parser.parse(record)) + query_results.extend(result_parser.parse(record)) return query_results diff --git a/app/clients/influxdb/query_result_parser.py b/app/clients/influxdb/query_result_parser.py index 6f38f7d..d5876fa 100644 --- a/app/clients/influxdb/query_result_parser.py +++ b/app/clients/influxdb/query_result_parser.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any, Dict, List from datetime import datetime @@ -24,5 +24,5 @@ def get_float(self, value: Any) -> float: f"Unable to convert value '{value}' to float. Unknown type {type(value)}." ) - def parse(self, row: Dict[str, Any]) -> MetricValue: + def parse(self, row: Dict[str, Any]) -> List[MetricValue]: raise NotImplementedError diff --git a/app/clients/influxdb/simple_result_parser.py b/app/clients/influxdb/simple_result_parser.py index c1727db..374d466 100644 --- a/app/clients/influxdb/simple_result_parser.py +++ b/app/clients/influxdb/simple_result_parser.py @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any, Dict, List from app.clients.influxdb.query_result_parser import QueryResultParser from app.core.metric_value import MetricValue @@ -10,10 +10,12 @@ class SimpleResultParser(QueryResultParser): TIMESTAMP_FIELD: str = "timestamp" VALUE_FIELD: str = "value" - def parse(self, row: Dict[str, Any]) -> MetricValue: - return MetricValue( - row[self.METRICID_FIELD], - row[self.RESOURCEID_FIELD], - self.get_timestamp(row[self.TIMESTAMP_FIELD]), - self.get_float(row[self.VALUE_FIELD]), - ) + def parse(self, row: Dict[str, Any]) -> List[MetricValue]: + return [ + MetricValue( + row[self.METRICID_FIELD], + row[self.RESOURCEID_FIELD], + self.get_timestamp(row[self.TIMESTAMP_FIELD]), + self.get_float(row[self.VALUE_FIELD]), + ) + ] diff --git a/app/clients/influxdb/test_simple_result_parser.py b/app/clients/influxdb/test_simple_result_parser.py index 759f612..a48a7fc 100644 --- a/app/clients/influxdb/test_simple_result_parser.py +++ b/app/clients/influxdb/test_simple_result_parser.py @@ -38,11 +38,13 @@ def test_parse(self) -> None: actual = parser.parse(row) self.assertEqual( - MetricValue( - "RAM.Capacity", - "glaciation-testm1w5-master01", - 1717142400000, - 26237685760.0, - ), + [ + MetricValue( + "RAM.Capacity", + "glaciation-testm1w5-master01", + 1717142400000, + 26237685760.0, + ) + ], actual, ) diff --git a/app/clients/prometheus/__init__.py b/app/clients/prometheus/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/clients/prometheus/mock_prometheus_client.py b/app/clients/prometheus/mock_prometheus_client.py new file mode 100644 index 0000000..d122a00 --- /dev/null +++ b/app/clients/prometheus/mock_prometheus_client.py @@ -0,0 +1,18 @@ +from typing import Dict, List + +from app.clients.influxdb.metricstore_client import MetricStoreClient +from 
app.clients.influxdb.query_result_parser import QueryResultParser +from app.core.metric_value import MetricValue + + +class MockPrometheusClient(MetricStoreClient): + results: Dict[str, List[MetricValue]] + + def __init__(self): + self.results = dict() + + def mock_query(self, query: str, client_response: List[MetricValue]) -> None: + self.results[query] = client_response + + async def query(self, query: str, _: QueryResultParser) -> List[MetricValue]: + return self.results.get(query) or [] diff --git a/app/clients/prometheus/prometheus_client.py b/app/clients/prometheus/prometheus_client.py new file mode 100644 index 0000000..b6ae680 --- /dev/null +++ b/app/clients/prometheus/prometheus_client.py @@ -0,0 +1,27 @@ +from typing import List + +from aioprometheus_api_client import PrometheusConnect + +from app.clients.influxdb.metricstore_client import MetricStoreClient +from app.clients.influxdb.query_result_parser import QueryResultParser +from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings +from app.core.metric_value import MetricValue + + +class PrometheusClient(MetricStoreClient): + settings: PrometheusClientSettings + connect: PrometheusConnect + + def __init__(self, settings: PrometheusClientSettings): + self.settings = settings + self.connect = PrometheusConnect(url=self.settings.url, disable_ssl=True) + + async def query( + self, query: str, result_parser: QueryResultParser + ) -> List[MetricValue]: + metrics: List[MetricValue] = [] + results = await self.connect.async_custom_query(query) + for result in results: + parsed = result_parser.parse(result) + metrics.extend(parsed) + return metrics diff --git a/app/clients/prometheus/prometheus_client_settings.py b/app/clients/prometheus/prometheus_client_settings.py new file mode 100644 index 0000000..c924eca --- /dev/null +++ b/app/clients/prometheus/prometheus_client_settings.py @@ -0,0 +1,5 @@ +from pydantic_settings import BaseSettings + + +class PrometheusClientSettings(BaseSettings): + url: str diff --git a/app/clients/prometheus/prometheus_result_parser.py b/app/clients/prometheus/prometheus_result_parser.py new file mode 100644 index 0000000..2fcf75a --- /dev/null +++ b/app/clients/prometheus/prometheus_result_parser.py @@ -0,0 +1,24 @@ +from typing import Any, Dict, List + +from app.clients.influxdb.query_result_parser import QueryResultParser +from app.core.metric_value import MetricValue + + +class PrometheusResultParser(QueryResultParser): + def parse(self, row: Dict[str, Any]) -> List[MetricValue]: + metric = row.get("metric") + value = row.get("value") + if metric and value: + metric_name = metric.get("__name__") or "undefined" + resource = metric.get("resource") or "undefined" + timestamp = int(value[0] * 1000) + metric_value = float(value[1]) + value = MetricValue( + metric_name, + resource, + timestamp=timestamp, + value=metric_value, + ) + return [value] + else: + return [] diff --git a/app/clients/prometheus/test_prometheus_result_parser.py b/app/clients/prometheus/test_prometheus_result_parser.py new file mode 100644 index 0000000..8c2b327 --- /dev/null +++ b/app/clients/prometheus/test_prometheus_result_parser.py @@ -0,0 +1,66 @@ +from typing import Any, Dict, List + +from unittest import TestCase + +from app.clients.prometheus.prometheus_result_parser import PrometheusResultParser +from app.core.metric_value import MetricValue + + +class PrometheusResultParserTest(TestCase): + input: List[Dict[str, Any]] = [ + { + "metric": { + "__name__": "RAM.Capacity", + "exported_instance": 
"glaciation-testm1w5-master01", + "resource": "10.14.1.160: 9102", + "job": "kepler", + "mode": "dynamic", + "package": "estimator0", + "source": "trained_power_model", + }, + "value": [1719489127.684, "23487213.396"], + }, + { + "metric": { + "__name__": "RAM.Capacity", + "exported_instance": "glaciation-testm1w5-worker01", + "resource": "10.14.1.161: 9102", + "job": "kepler", + "mode": "dynamic", + "package": "estimator0", + "source": "trained_power_model", + }, + "value": [1719489127.684, "4996106.442"], + }, + ] + + def test_parse(self) -> None: + parser = PrometheusResultParser() + + actual = parser.parse(self.input[0]) + + self.assertEqual( + [ + MetricValue( + "RAM.Capacity", + "10.14.1.160: 9102", + 1719489127684, + 23487213.396, + ) + ], + actual, + ) + + actual = parser.parse(self.input[1]) + + self.assertEqual( + [ + MetricValue( + "RAM.Capacity", + "10.14.1.161: 9102", + 1719489127684, + 4996106.442, + ) + ], + actual, + ) diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 8c8560a..354eaf4 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -3,6 +3,7 @@ import asyncio from loguru import logger +from prometheus_client import Counter from pydantic_settings import BaseSettings from app.clients.k8s.k8s_client import K8SClient @@ -37,6 +38,15 @@ class KGBuilder: settings: KGBuilderSettings slice_strategy: SliceStrategy slice_assembler: KGSliceAssembler + errors_metric: Counter = Counter( + "builder_errors_total", "knowledge graph builder errors" + ) + successes_metric: Counter = Counter( + "builder_successes_total", "knowledge graph builder successes" + ) + passes_metric: Counter = Counter( + "builder_cycles_total", "knowledge graph builder cycles" + ) def __init__( self, @@ -64,9 +74,12 @@ async def run(self) -> None: now_seconds = self.clock.now_seconds() try: + self.passes_metric.inc() await self.run_cycle(now_seconds) + self.successes_metric.inc() except Exception as e: logger.error(f"Builder error: {e}") + self.errors_metric.inc() sleep_seconds = ( now_seconds diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index 8b0a5b2..c0d3a7c 100644 --- a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -1,6 +1,7 @@ import asyncio from loguru import logger +from prometheus_client import Counter from app.core.async_queue import AsyncQueue from app.core.dkg_slice_store import DKGSliceStore @@ -13,6 +14,18 @@ class KGUpdater: kg_repository: KGRepository terminated: asyncio.Event slices: DKGSliceStore + errors_metric: Counter = Counter( + "updater_errors_total", "knowledge graph updater errors" + ) + successes_metric: Counter = Counter( + "updater_successes_total", "knowledge graph updater successes" + ) + passes_metric: Counter = Counter( + "updater_cycles_total", "knowledge graph updater cycles" + ) + updates_metric: Counter = Counter( + "updater_updates_total", "knowledge graph updates" + ) def __init__( self, @@ -29,8 +42,11 @@ async def run(self) -> None: logger.info("Updater started.") while not self.terminated.is_set(): try: + self.passes_metric.inc() await self.run_cycle() + self.successes_metric.inc() except Exception as e: + self.errors_metric.inc() logger.error(f"Updater error: {e}") logger.info("Updater stopped.") @@ -45,5 +61,6 @@ async def run_cycle(self) -> None: timestamp=slice.timestamp, ) await self.kg_repository.update(slice.slice_id, slice.graph) + self.updates_metric.inc() else: await asyncio.sleep(0.5) diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index f10fce6..a80ae39 100644 --- 
a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -35,13 +35,13 @@ def __init__( clock: Clock, metadata_client: MetadataServiceClient, k8s_client: K8SClient, - influxdb_client: MetricStoreClient, + metric_store_client: MetricStoreClient, jsonld_config: JsonLDConfiguration, settings: KGExporterSettings, ): self.settings = settings kg_repository = KGRepository(metadata_client, jsonld_config) - influxdb_repository = MetricRepository(influxdb_client) + influxdb_repository = MetricRepository(metric_store_client) self.terminated = asyncio.Event() self.queue = AsyncQueue[DKGSlice]() self.builder = KGBuilder( diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index b54b766..89deb2d 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -4,11 +4,11 @@ from argparse import Namespace from io import StringIO -from app.clients.influxdb.influxdb_client_impl import InfluxDBClientImpl from app.clients.k8s.k8s_client_impl import K8SClientImpl from app.clients.metadata_service.metadata_service_client_impl import ( MetadataServiceClientImpl, ) +from app.clients.prometheus.prometheus_client import PrometheusClient from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.id_base import IdBase from app.kg.iri import IRI @@ -61,14 +61,15 @@ def build(self) -> KGExporterContext: clock = ClockImpl() metadata_client = MetadataServiceClientImpl(self.settings.metadata) k8s_client = K8SClientImpl(self.settings.k8s) - influxdb_client = InfluxDBClientImpl(self.settings.influxdb) + # influxdb_client = InfluxDBClientImpl(self.settings.influxdb) + prometheus_client = PrometheusClient(self.settings.prometheus_client) jsonld_config = self.get_jsonld_config() context = KGExporterContext( clock, metadata_client, k8s_client, - influxdb_client, + prometheus_client, jsonld_config, self.settings, ) diff --git a/app/kgexporter_settings.py b/app/kgexporter_settings.py index 69ceb87..198f5eb 100644 --- a/app/kgexporter_settings.py +++ b/app/kgexporter_settings.py @@ -5,6 +5,7 @@ from app.clients.metadata_service.metadata_service_settings import ( MetadataServiceSettings, ) +from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings from app.core.kg_builder import KGBuilderSettings @@ -17,4 +18,5 @@ class KGExporterSettings(BaseSettings): k8s: K8SSettings influxdb: InfluxDBSettings metadata: MetadataServiceSettings + prometheus_client: PrometheusClientSettings prometheus: PrometheusSettings diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 1c91044..e7a80c2 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -13,6 +13,7 @@ MockMetadataServiceClient, SerializedGraph, ) +from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings from app.core.kg_builder import KGBuilderSettings, QuerySettings from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import KGSliceId @@ -98,5 +99,6 @@ def test_kg_exporter_settings(self) -> KGExporterSettings: ), metadata=MetadataServiceSettings(), prometheus=PrometheusSettings(endpoint_port=8080), + prometheus_client=PrometheusClientSettings(url="prometheus.integration"), ) return settings diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index 16b6ce5..17ceac8 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -6,6 +6,7 @@ from app.clients.metadata_service.metadata_service_settings import ( MetadataServiceSettings, ) +from 
app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings from app.core.kg_builder import KGBuilderSettings, QuerySettings from app.kgexporter_settings import KGExporterSettings, PrometheusSettings from app.pydantic_yaml import from_yaml, to_yaml @@ -23,6 +24,7 @@ def test_dump_load_settings(self): ), metadata=MetadataServiceSettings(), prometheus=PrometheusSettings(endpoint_port=8080), + prometheus_client=PrometheusClientSettings(url="prometheus.integration"), ) with TemporaryDirectory("-pydantic", "test") as tmpdir: yaml_file = f"{tmpdir}/test.yaml" diff --git a/etc/config.yaml b/etc/config.yaml index 210019d..dd2b1a6 100644 --- a/etc/config.yaml +++ b/etc/config.yaml @@ -1,5 +1,5 @@ builder: - builder_tick_seconds: 1 + builder_tick_seconds: 10 node_port: 80 queries: node_queries: [] @@ -11,9 +11,13 @@ influxdb: token: token url: test k8s: - in_cluster: true + in_cluster: false metadata: metadata_service_push_period_sec: 60 - metadata_service_url: metadata-service + metadata_service_url: metadata.integration + # metadata_service_url: metadata-service + use_single_url: true prometheus: endpoint_port: 8080 +prometheus_client: + url: prometheus.integration diff --git a/poetry.lock b/poetry.lock index 4d667ca..c9e40a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -95,6 +95,26 @@ yarl = ">=1.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns", "brotlicffi"] +[[package]] +name = "aioprometheus-api-client" +version = "0.0.2" +description = "An async python api to collect data from prometheus" +optional = false +python-versions = "*" +files = [ + {file = "aioprometheus-api-client-0.0.2.tar.gz", hash = "sha256:3c67055b02a66aa7ba1ee10627ef4c536662afa71f711a84f0dbd75363eae938"}, + {file = "aioprometheus_api_client-0.0.2-py3-none-any.whl", hash = "sha256:d18c2ac34ad3bd34ceaa3eb6468685acf077ff134967abf99d55b7f0c6cc8b4c"}, +] + +[package.dependencies] +anyio = "*" +dateparser = "*" +httmock = "*" +httpx = "*" +matplotlib = "*" +numpy = "*" +pandas = ">=1.0.0" + [[package]] name = "aiosignal" version = "1.3.1" @@ -473,13 +493,13 @@ files = [ [[package]] name = "filelock" -version = "3.15.3" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, - {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] @@ -774,13 +794,13 @@ files = [ [[package]] name = "influxdb-client" -version = "1.43.0" +version = "1.44.0" description = "InfluxDB 2.0 Python client library" optional = false python-versions = ">=3.7" files = [ - {file = "influxdb_client-1.43.0-py3-none-any.whl", hash = "sha256:f079e63018f521024118bc0141b6403c65506711e2e6e93500f8e69f1675dc38"}, - {file = "influxdb_client-1.43.0.tar.gz", hash = "sha256:ae2614d891baed52c0ae8f6194a04ee5b1c6422f6061318a3639fe63b7671b25"}, + {file = "influxdb_client-1.44.0-py3-none-any.whl", hash = "sha256:e4c1ac9c9925c4693d63e988e22f65d2ddc1867f8910813b7f4721633175f2a0"}, + {file = "influxdb_client-1.44.0.tar.gz", hash = "sha256:da9bc0cc49de4a0ac844d833c1efa65227ec5a2254e63cdbe07b5d532c0c37f8"}, ] [package.dependencies] @@ -1169,38 +1189,38 @@ files = [ [[package]] name = "mypy" -version = "1.10.0" +version = "1.10.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = 
"mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] @@ -1545,25 +1565,6 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" -[[package]] -name = "prometheus-api-client" -version = "0.5.5" -description = "A small python api to collect data from prometheus" -optional = false -python-versions = "*" -files = [ - {file = "prometheus-api-client-0.5.5.tar.gz", hash = "sha256:59449c4be0485ea5a2dfbbed2482eb0a3eeac8c131b3efc866df4c9a0c403bb7"}, - {file = "prometheus_api_client-0.5.5-py3-none-any.whl", hash = "sha256:e65ad16c262da15ed2d2288b521a80278feed2d31e7f0e732a935b67af8e24b4"}, -] - -[package.dependencies] -dateparser = "*" -httmock = "*" -matplotlib = "*" -numpy = "*" -pandas = ">=1.4.0" -requests = "*" - [[package]] name = "prometheus-client" version = "0.20.0" @@ -1726,13 +1727,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.3.3" +version = "2.3.4" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.3.3-py3-none-any.whl", hash = "sha256:e4ed62ad851670975ec11285141db888fd24947f9440bd4380d7d8788d4965de"}, - {file = "pydantic_settings-2.3.3.tar.gz", hash = "sha256:87fda838b64b5039b970cd47c3e8a1ee460ce136278ff672980af21516f6e6ce"}, + {file = 
"pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"}, + {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"}, ] [package.dependencies] @@ -2076,13 +2077,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "70.1.0" +version = "70.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, - {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, + {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, + {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, ] [package.extras] @@ -2209,13 +2210,13 @@ tests = ["Werkzeug (==2.0.3)", "aiohttp", "boto3", "httplib2", "httpx", "pytest" [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -2442,4 +2443,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "0139321fec59a7a2799037bf63b85faf591ddf8d502e2049a76419a04b2a5143" +content-hash = "729520443c40116949aa6713601de92af4fd5ae6c7dad9c729705c4f7cab6b24" diff --git a/pyproject.toml b/pyproject.toml index 8e0b09c..f477969 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ influxdb-client = "^1.43.0" aiohttp = "3.9.3" loguru = "0.7.2" prometheus-client = "0.20.0" -prometheus_api_client = "0.5.5" +aioprometheus-api-client = "0.0.2" [tool.poetry.group.dev.dependencies] black = "^23.12" From 6278312354fc9da35dfa458c8ce416d894350f7f Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 27 Jun 2024 15:10:47 +0200 Subject: [PATCH 47/61] HHT-669: signal handler fix --- app/kgexporter_context.py | 2 +- app/kgexporter_context_builder.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index a80ae39..2305531 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -77,6 +77,6 @@ def wait_for_termination(self) -> None: self.runner.run(self.terminated.wait()) logger.info("Application terminated.") - def exit_gracefully(self) -> None: + def exit_gracefully(self, _1: Any, _2: Any) -> None: self.stop() self.wait_for_termination() diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index 89deb2d..d742bf3 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -61,7 +61,6 @@ def build(self) -> KGExporterContext: clock = ClockImpl() metadata_client = 
MetadataServiceClientImpl(self.settings.metadata) k8s_client = K8SClientImpl(self.settings.k8s) - # influxdb_client = InfluxDBClientImpl(self.settings.influxdb) prometheus_client = PrometheusClient(self.settings.prometheus_client) jsonld_config = self.get_jsonld_config() From cba338b2cca9c01b8bf947c887e6ab3968a0fd02 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 27 Jun 2024 15:31:56 +0200 Subject: [PATCH 48/61] HHT-669: Single Slice Strategy --- app/core/kg_builder.py | 9 ++- app/core/single_slice_strategy.py | 25 ++++++ app/core/test_kg_builder.py | 6 +- app/core/test_single_slice_strategy.py | 103 +++++++++++++++++++++++++ app/test_kgexporter_context.py | 6 +- app/test_pydantic_yaml.py | 6 +- charts/app/values.yaml | 4 + etc/config.yaml | 3 +- 8 files changed, 157 insertions(+), 5 deletions(-) create mode 100644 app/core/single_slice_strategy.py create mode 100644 app/core/test_single_slice_strategy.py diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 354eaf4..0d64b19 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -11,6 +11,7 @@ from app.core.kg_repository import KGRepository from app.core.kg_slice_assembler import KGSliceAssembler from app.core.metric_repository import MetricQuery, MetricRepository +from app.core.single_slice_strategy import SingleSliceStrategy from app.core.slice_for_node_strategy import SliceForNodeStrategy from app.core.slice_strategy import SliceStrategy from app.core.types import DKGSlice, MetricSnapshot @@ -26,6 +27,8 @@ class QuerySettings(BaseSettings): class KGBuilderSettings(BaseSettings): builder_tick_seconds: int node_port: int + is_single_slice: bool + single_slice_url: str queries: QuerySettings @@ -65,7 +68,11 @@ def __init__( self.kg_repository = kg_repository self.influxdb_repository = influxdb_repository self.settings = settings - self.slice_strategy = SliceForNodeStrategy(node_port=settings.node_port) + self.slice_strategy = ( + SingleSliceStrategy(settings.single_slice_url) + if settings.is_single_slice + else SliceForNodeStrategy(node_port=settings.node_port) + ) self.slice_assembler = KGSliceAssembler() async def run(self) -> None: diff --git a/app/core/single_slice_strategy.py b/app/core/single_slice_strategy.py new file mode 100644 index 0000000..ab4a04e --- /dev/null +++ b/app/core/single_slice_strategy.py @@ -0,0 +1,25 @@ +from typing import Dict + +from urllib.parse import urlparse + +from app.clients.k8s.k8s_client import ResourceSnapshot +from app.core.slice_strategy import SliceStrategy +from app.core.types import KGSliceId, MetricSnapshot, SliceInputs + + +class SingleSliceStrategy(SliceStrategy): + metadata_host: str + metadata_port: int + + def __init__(self, metadata_url: str): + parse_result = urlparse(metadata_url) + self.metadata_host = parse_result.hostname or "unknown" + self.metadata_port = parse_result.port or 80 + + def get_slices( + self, resources: ResourceSnapshot, metrics: MetricSnapshot + ) -> Dict[KGSliceId, SliceInputs]: + result: Dict[KGSliceId, SliceInputs] = dict() + slice_id = KGSliceId(self.metadata_host, self.metadata_port) + result[slice_id] = SliceInputs(resources, metrics) + return result diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 892e59b..4d47ddf 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -39,7 +39,11 @@ def setUp(self) -> None: self.running_event = asyncio.Event() self.runner = asyncio.Runner() self.settings = KGBuilderSettings( - builder_tick_seconds=1, node_port=80, queries=QuerySettings() + 
builder_tick_seconds=1, + node_port=80, + queries=QuerySettings(), + is_single_slice=False, + single_slice_url="test", ) def test_build_minimal(self) -> None: diff --git a/app/core/test_single_slice_strategy.py b/app/core/test_single_slice_strategy.py new file mode 100644 index 0000000..5998287 --- /dev/null +++ b/app/core/test_single_slice_strategy.py @@ -0,0 +1,103 @@ +from unittest import TestCase + +from app.core.single_slice_strategy import SingleSliceStrategy +from app.core.test_snapshot_base import SnapshotTestBase +from app.core.types import KGSliceId + + +class SingleSliceStrategyTest(TestCase, SnapshotTestBase): + def test_split_empty(self) -> None: + strategy = SingleSliceStrategy(metadata_url="http://metadata-service:80") + resources = self.load_k8s_snapshot("empty") + metrics = self.load_metric_snapshot("empty") + actual = strategy.get_slices(resources, metrics) + expected_slice_id = KGSliceId(node_ip="metadata-service", port=80) + self.assertEqual({expected_slice_id}, actual.keys()) + + inputs = actual[expected_slice_id] + actual_names = inputs.resource.get_resource_names() + self.assertEqual(actual_names, set()) + + actual_metric_names = inputs.metrics.get_metric_names() + self.assertEqual(actual_metric_names, set()) + + def test_split_minimal(self) -> None: + strategy = SingleSliceStrategy(metadata_url="http://metadata-service:80") + resources = self.load_k8s_snapshot("minimal") + metrics = self.load_metric_snapshot("minimal") + actual = strategy.get_slices(resources, metrics) + + expected_slice_id = KGSliceId(node_ip="metadata-service", port=80) + self.assertEqual({expected_slice_id}, actual.keys()) + + inputs = actual[expected_slice_id] + actual_names = inputs.resource.get_resource_names() + self.assertEqual( + { + "glaciation-test-master01", + "coredns", + "coredns-787d4945fb", + "coredns-787d4945fb-l85r5", + }, + actual_names, + ) + + actual_metric_names = inputs.metrics.get_metric_names() + self.assertEqual( + { + "eph_usage", + "cpu_usage", + "net_usage", + "ram_usage", + "pod_net_usage", + "pod_cpu_usage", + "pod_ram_usage", + "pod_eph_usage", + }, + actual_metric_names, + ) + + def test_split_multinode(self) -> None: + strategy = SingleSliceStrategy(metadata_url="http://metadata-service:80") + resources = self.load_k8s_snapshot("multinode") + metrics = self.load_metric_snapshot("multinode") + actual = strategy.get_slices(resources, metrics) + + expected_slice_id = KGSliceId(node_ip="metadata-service", port=80) + self.assertEqual({expected_slice_id}, actual.keys()) + + inputs = actual[expected_slice_id] + actual_names = inputs.resource.get_resource_names() + self.assertEqual( + { + "glaciation-test-master01", + "coredns", + "coredns-787d4945fb", + "coredns-787d4945fb-l85r5", + "kube-flannel-ds-848v8", + "glaciation-pool-0", + "init-vault-cluster-cbqhq", + "kube-flannel-ds", + "init-vault-cluster", + "glaciation-pool-0-0", + "tenant1-pool-0-1", + "glaciation-test-worker01", + }, + actual_names, + ) + + actual_metric_names = inputs.metrics.get_metric_names() + self.assertEqual( + { + "eph_usage", + "cpu_usage", + "net_usage", + "ram_usage", + "pod_net_usage", + "pod_cpu_usage", + "pod_ram_usage", + "pod_eph_usage", + "gpu_usage", + }, + actual_metric_names, + ) diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index e7a80c2..54d7956 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -91,7 +91,11 @@ def assert_graphs( def test_kg_exporter_settings(self) -> KGExporterSettings: settings = KGExporterSettings( 
builder=KGBuilderSettings( - builder_tick_seconds=60, node_port=80, queries=QuerySettings() + builder_tick_seconds=60, + node_port=80, + queries=QuerySettings(), + is_single_slice=False, + single_slice_url="http://metadata-service:80", ), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index 17ceac8..ae65ecd 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -16,7 +16,11 @@ class PyDanticYamlTest(TestCase): def test_dump_load_settings(self): expected = KGExporterSettings( builder=KGBuilderSettings( - builder_tick_seconds=1, node_port=80, queries=QuerySettings() + builder_tick_seconds=1, + node_port=80, + queries=QuerySettings(), + is_single_slice=True, + single_slice_url="metadata-service:80", ), k8s=K8SSettings(in_cluster=True), influxdb=InfluxDBSettings( diff --git a/charts/app/values.yaml b/charts/app/values.yaml index d1860a2..adb2d4a 100644 --- a/charts/app/values.yaml +++ b/charts/app/values.yaml @@ -28,6 +28,8 @@ settings: builder: builder_tick_seconds: 60 node_port: 80 + single_slice_url: http://metadata-service:80 + is_single_slice: False queries: node_queries: [] pod_queries: [] @@ -44,3 +46,5 @@ settings: metadata_service_url: metadata-service prometheus: endpoint_port: 8080 + prometheus_client: + url: prometheus.integration diff --git a/etc/config.yaml b/etc/config.yaml index dd2b1a6..cf781df 100644 --- a/etc/config.yaml +++ b/etc/config.yaml @@ -1,6 +1,8 @@ builder: builder_tick_seconds: 10 node_port: 80 + is_single_slice: True + single_slice_url: http://metadata.integration:80 queries: node_queries: [] pod_queries: [] @@ -15,7 +17,6 @@ k8s: metadata: metadata_service_push_period_sec: 60 metadata_service_url: metadata.integration - # metadata_service_url: metadata-service use_single_url: true prometheus: endpoint_port: 8080 From e96332a30595c7b718911c33862bbead7879ace6 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Thu, 27 Jun 2024 15:52:56 +0200 Subject: [PATCH 49/61] HHT-669: minor fix --- app/main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/main.py b/app/main.py index 49c9006..0cc7764 100644 --- a/app/main.py +++ b/app/main.py @@ -9,8 +9,8 @@ def main() -> None: if builder: context = builder.build() - signal.signal(signal.SIGINT, context.exit_gracefully) # type: ignore - signal.signal(signal.SIGTERM, context.exit_gracefully) # type: ignore + signal.signal(signal.SIGINT, context.exit_gracefully) + signal.signal(signal.SIGTERM, context.exit_gracefully) context.start() context.wait_for_termination() From 75425bcbf5f2094e71bcb50a1e14bb21779bc38c Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 09:17:16 +0200 Subject: [PATCH 50/61] HHT-669: context is defined in transformer --- app/clients/k8s/k8s_client.py | 10 +++++- app/clients/k8s/k8s_client_impl.py | 9 ++++- app/clients/k8s/mock_k8s_client.py | 8 +++++ .../snapshot/empty/slice_127.0.0.1_80.jsonld | 2 +- .../snapshot/minimal/k8s_api_versions.yaml | 7 ++++ .../slice_glaciation-test-master01_80.jsonld | 2 +- .../snapshot/multinode/k8s_api_versions.yaml | 7 ++++ .../slice_glaciation-test-master01_80.jsonld | 2 +- .../slice_glaciation-test-worker01_80.jsonld | 2 +- app/core/kg_builder.py | 2 +- app/core/kg_repository.py | 33 ++++++++++++++----- app/core/kg_slice_assembler.py | 31 ++++++++++++++++- app/core/kg_updater.py | 4 ++- app/core/single_slice_strategy.py | 2 ++ app/core/slice_for_node_strategy.py | 6 +++- app/core/test_graph_fixture.py | 16 +++++---- 
app/core/test_kg_builder.py | 2 +- app/core/test_kg_repository.py | 6 ++-- app/core/test_kg_updater.py | 4 +-- app/core/test_snapshot_base.py | 29 +++++++++------- app/core/types.py | 3 +- app/kgexporter_context.py | 4 +-- app/kgexporter_context_builder.py | 28 +--------------- app/test_kgexporter_context.py | 2 -- 24 files changed, 145 insertions(+), 76 deletions(-) create mode 100644 app/core/__fixture__/snapshot/minimal/k8s_api_versions.yaml create mode 100644 app/core/__fixture__/snapshot/multinode/k8s_api_versions.yaml diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py index 48538bd..b1c84c3 100644 --- a/app/clients/k8s/k8s_client.py +++ b/app/clients/k8s/k8s_client.py @@ -9,6 +9,7 @@ @dataclass class ResourceSnapshot: cluster: Dict[str, Any] = field(default_factory=dict) + versions_info: Dict[str, Any] = field(default_factory=dict) pods: List[Dict[str, Any]] = field(default_factory=list) nodes: List[Dict[str, Any]] = field(default_factory=list) deployments: List[Dict[str, Any]] = field(default_factory=list) @@ -112,9 +113,13 @@ async def fetch_snapshot(self) -> ResourceSnapshot: daemonsets, replicasets, ) = resources - cluster_info = await self.get_cluster_info() + general_info = await asyncio.gather( + self.get_cluster_info(), self.get_api_versions() + ) + (cluster_info, versions_info) = general_info return ResourceSnapshot( cluster=cluster_info, + versions_info=versions_info, pods=pods, nodes=nodes, deployments=deployments, @@ -147,3 +152,6 @@ async def get_jobs(self) -> List[Dict[str, Any]]: async def get_cluster_info(self) -> Dict[str, Any]: raise NotImplementedError + + async def get_api_versions(self) -> Dict[str, Any]: + raise NotImplementedError diff --git a/app/clients/k8s/k8s_client_impl.py b/app/clients/k8s/k8s_client_impl.py index 85495e4..a652f68 100644 --- a/app/clients/k8s/k8s_client_impl.py +++ b/app/clients/k8s/k8s_client_impl.py @@ -1,6 +1,6 @@ from typing import Any, Dict, List -from kubernetes import config, dynamic +from kubernetes import client, config, dynamic from kubernetes.client import api_client from app.clients.k8s.k8s_client import K8SClient @@ -49,6 +49,13 @@ async def get_cluster_info(self) -> Dict[str, Any]: else: return {} + async def get_api_versions(self) -> Dict[str, Any]: + versions = client.CoreApi().get_api_versions() + if versions: + return versions.to_dict() # type: ignore + else: + return {} + async def get_resource(self, kind: str) -> List[Dict[str, Any]]: api = self.client.resources.get(api_version="v1", kind=kind) return [item.to_dict() for item in api.get().items] diff --git a/app/clients/k8s/mock_k8s_client.py b/app/clients/k8s/mock_k8s_client.py index fd38a71..92508d0 100644 --- a/app/clients/k8s/mock_k8s_client.py +++ b/app/clients/k8s/mock_k8s_client.py @@ -12,6 +12,7 @@ class MockK8SClient(K8SClient): statefullsets: List[Dict[str, Any]] jobs: List[Dict[str, Any]] cluster: Dict[str, Any] + api_versions: Dict[str, Any] def __init__(self): self.nodes = [] @@ -22,6 +23,13 @@ def __init__(self): self.statefullsets = [] self.jobs = [] self.cluster = {} + self.api_versions = {} + + def mock_api_versions(self, api_versions: Dict[str, Any]) -> None: + self.api_versions = api_versions + + async def get_api_versions(self) -> Dict[str, Any]: + return self.api_versions def mock_cluster(self, cluster: Dict[str, Any]) -> None: self.cluster = cluster diff --git a/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld b/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld index c6854b3..095873a 100644 --- 
a/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld +++ b/app/core/__fixture__/snapshot/empty/slice_127.0.0.1_80.jsonld @@ -2,7 +2,7 @@ "@context": { "k8s": "http://glaciation-project.eu/model/k8s/", "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", + "cluster": "https://10.14.1.160:6443/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/app/core/__fixture__/snapshot/minimal/k8s_api_versions.yaml b/app/core/__fixture__/snapshot/minimal/k8s_api_versions.yaml new file mode 100644 index 0000000..fbd70e0 --- /dev/null +++ b/app/core/__fixture__/snapshot/minimal/k8s_api_versions.yaml @@ -0,0 +1,7 @@ +api_version: +kind: APIVersions +server_address_by_client_cid_rs: + - client_cidr: 0.0.0.0/0 + server_address: 10.14.1.160:6443 +versions: + - v1 diff --git a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld index 93735fd..1c8826b 100644 --- a/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/minimal/slice_glaciation-test-master01_80.jsonld @@ -2,7 +2,7 @@ "@context": { "k8s": "http://glaciation-project.eu/model/k8s/", "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", + "cluster": "https://10.14.1.160:6443/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/app/core/__fixture__/snapshot/multinode/k8s_api_versions.yaml b/app/core/__fixture__/snapshot/multinode/k8s_api_versions.yaml new file mode 100644 index 0000000..fbd70e0 --- /dev/null +++ b/app/core/__fixture__/snapshot/multinode/k8s_api_versions.yaml @@ -0,0 +1,7 @@ +api_version: +kind: APIVersions +server_address_by_client_cid_rs: + - client_cidr: 0.0.0.0/0 + server_address: 10.14.1.160:6443 +versions: + - v1 diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld index da238b3..9146ea0 100644 --- a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-master01_80.jsonld @@ -2,7 +2,7 @@ "@context": { "k8s": "http://glaciation-project.eu/model/k8s/", "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", + "cluster": "https://10.14.1.160:6443/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld index 2328d5c..74cc327 100644 --- a/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld +++ b/app/core/__fixture__/snapshot/multinode/slice_glaciation-test-worker01_80.jsonld @@ -2,7 +2,7 @@ "@context": { "k8s": "http://glaciation-project.eu/model/k8s/", "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", + "cluster": "https://10.14.1.160:6443/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" }, "@graph": [ diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 0d64b19..7b5ea93 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -85,7 +85,7 @@ async def run(self) -> None: await 
self.run_cycle(now_seconds) self.successes_metric.inc() except Exception as e: - logger.error(f"Builder error: {e}") + logger.error(f"Builder error: {type(e)}, {e}") self.errors_metric.inc() sleep_seconds = ( diff --git a/app/core/kg_repository.py b/app/core/kg_repository.py index bcc676f..a489621 100644 --- a/app/core/kg_repository.py +++ b/app/core/kg_repository.py @@ -1,22 +1,22 @@ +from typing import Any, Dict + from io import StringIO from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.kg_result_parser import KGResultParser from app.core.types import KGSliceId +from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph +from app.kg.iri import IRI from app.serialize.jsonld_configuration import JsonLDConfiguration from app.serialize.jsonld_serializer import JsonLDSerialializer class KGRepository: metadata_client: MetadataServiceClient - jsonld_config: JsonLDConfiguration - def __init__( - self, metadata_client: MetadataServiceClient, jsonld_config: JsonLDConfiguration - ): + def __init__(self, metadata_client: MetadataServiceClient): self.metadata_client = metadata_client - self.jsonld_config = jsonld_config async def query( self, slice_id: KGSliceId, query: str, result_parser: KGResultParser @@ -25,13 +25,28 @@ async def query( result = await self.metadata_client.query(host_and_port, query) return result_parser.parse(result) - async def update(self, slice_id: KGSliceId, graph: Graph) -> None: - graph_str = self.to_jsonld(graph) + async def update( + self, slice_id: KGSliceId, graph: Graph, context: Dict[str, Any] + ) -> None: + graph_str = self.to_jsonld(graph, context) host_and_port = slice_id.get_host_port() await self.metadata_client.insert(host_and_port, graph_str) - def to_jsonld(self, graph: Graph) -> str: - serializer = JsonLDSerialializer(self.jsonld_config) + def to_jsonld(self, graph: Graph, context: Dict[str, Any]) -> str: + serializer = JsonLDSerialializer(self.get_jsonld_config(context)) out = StringIO() serializer.write(out, graph) return out.getvalue() + + def get_jsonld_config(self, context: Dict[str, Any]) -> JsonLDConfiguration: + return JsonLDConfiguration( + {JsonLDConfiguration.DEFAULT_CONTEXT_IRI: context}, + { + # TODO use IRIs from UpperOntologyBase + IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasuringResource"), + IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), + }, + ) diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py index 3790add..016a883 100644 --- a/app/core/kg_slice_assembler.py +++ b/app/core/kg_slice_assembler.py @@ -1,5 +1,7 @@ from typing import Any, Dict, List, Type +from urllib.parse import urlparse + from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.types import DKGSlice, KGSliceId, MetricSnapshot, SliceInputs from app.k8s_transform.cluster_transformer import ClusterToRDFTransformer @@ -25,8 +27,9 @@ def assemble( self.transform_resources(now, inputs.resource, sink) self.transform_metrics(now, inputs.metrics, sink) + context = self.get_context(inputs.resource.versions_info) - slice = DKGSlice(slice_id, sink, now) + slice = DKGSlice(slice_id, sink, context, now) return slice def transform_resources( @@ -82,3 +85,29 @@ def transform_metrics( node_transformer.transform(context) pod_transformer = 
PodMetricToGraphTransformer(snapshot.pod_metrics, sink) pod_transformer.transform(context) + + def get_context(self, versions_info: Dict[str, Any]) -> Dict[str, Any]: + server_address_by_client_cid_rs_list = ( + versions_info.get("server_address_by_client_cid_rs") or [] + ) + server_address_url = "https://kubernetes.local/" + for server_address_by_client_cid_rs in server_address_by_client_cid_rs_list: + server_address = server_address_by_client_cid_rs.get("server_address") + if server_address: + server_address_url = server_address + break + + context = { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": self.unify_url(server_address_url), + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + } + return context + + def unify_url(self, server_address_url: str) -> str: + parse_result = urlparse(server_address_url) + if not parse_result.scheme: + return f"https://{server_address_url}/" + else: + return server_address_url diff --git a/app/core/kg_updater.py b/app/core/kg_updater.py index c0d3a7c..634aaf3 100644 --- a/app/core/kg_updater.py +++ b/app/core/kg_updater.py @@ -60,7 +60,9 @@ async def run_cycle(self) -> None: slice=slice.slice_id, timestamp=slice.timestamp, ) - await self.kg_repository.update(slice.slice_id, slice.graph) + await self.kg_repository.update( + slice.slice_id, slice.graph, slice.context + ) self.updates_metric.inc() else: await asyncio.sleep(0.5) diff --git a/app/core/single_slice_strategy.py b/app/core/single_slice_strategy.py index ab4a04e..207892f 100644 --- a/app/core/single_slice_strategy.py +++ b/app/core/single_slice_strategy.py @@ -12,6 +12,8 @@ class SingleSliceStrategy(SliceStrategy): metadata_port: int def __init__(self, metadata_url: str): + # TODO parse + # result = urllib.parse.urlsplit(f"//{host_and_port}") parse_result = urlparse(metadata_url) self.metadata_host = parse_result.hostname or "unknown" self.metadata_port = parse_result.port or 80 diff --git a/app/core/slice_for_node_strategy.py b/app/core/slice_for_node_strategy.py index 4acce52..d202e28 100644 --- a/app/core/slice_for_node_strategy.py +++ b/app/core/slice_for_node_strategy.py @@ -38,7 +38,11 @@ def split_node( node_hostname = self.get_resource_name(node) slice_id = KGSliceId(node_hostname, self.node_port) - slice_resources = ResourceSnapshot(cluster=src_resources.cluster, nodes=[node]) + slice_resources = ResourceSnapshot( + cluster=src_resources.cluster, + versions_info=src_resources.versions_info, + nodes=[node], + ) slice_metrics = MetricSnapshot() self.add_workloads(node_hostname, index, slice_resources, src_resources) self.add_metrics(slice_resources, slice_metrics, src_metrics) diff --git a/app/core/test_graph_fixture.py b/app/core/test_graph_fixture.py index cc3a83a..beedf2b 100644 --- a/app/core/test_graph_fixture.py +++ b/app/core/test_graph_fixture.py @@ -42,12 +42,7 @@ def simple_node(self) -> Tuple[Graph, str]: def get_jsonld_config(self) -> JsonLDConfiguration: contexts: Dict[IdBase, Dict[str, Any]] = { - JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { - "k8s": "http://glaciation-project.eu/model/k8s/", - "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - } + JsonLDConfiguration.DEFAULT_CONTEXT_IRI: self.get_test_context() } return JsonLDConfiguration( contexts, @@ -61,6 +56,15 @@ def get_jsonld_config(self) -> JsonLDConfiguration: }, ) + def get_test_context(self) -> 
Dict[str, Any]: + context = { + "k8s": "http://glaciation-project.eu/model/k8s/", + "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", + "cluster": "https://127.0.0.1:6443/", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + } + return context + def load_json(self, name: str) -> Dict[str, Any]: with FileIO(f"app/core/__fixture__/{name}.jsonld") as f: return json.load(f) # type: ignore diff --git a/app/core/test_kg_builder.py b/app/core/test_kg_builder.py index 4d47ddf..f033da0 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/test_kg_builder.py @@ -62,7 +62,7 @@ def test_build_minimal(self) -> None: self.assert_graph(slice.graph, "minimal", slice.slice_id) def create_builder(self) -> KGBuilder: - repository = KGRepository(self.client, self.get_jsonld_config()) + repository = KGRepository(self.client) influxdb_repository = MetricRepository(self.influxdb_client) return KGBuilder( self.running_event, diff --git a/app/core/test_kg_repository.py b/app/core/test_kg_repository.py index fcb6370..3a682bf 100644 --- a/app/core/test_kg_repository.py +++ b/app/core/test_kg_repository.py @@ -14,18 +14,18 @@ class KGRepositoryTest(TestCase, TestGraphFixture): def test_update(self) -> None: client = MockMetadataServiceClient() - repository = KGRepository(client, self.get_jsonld_config()) + repository = KGRepository(client) slice_id = KGSliceId("127.0.0.1", 80) graph, expected = self.simple_node() - asyncio.run(repository.update(slice_id, graph)) + asyncio.run(repository.update(slice_id, graph, self.get_test_context())) graphs = client.take_inserts(slice_id.get_host_port()) self.assertEqual(expected, graphs[0]) def test_query(self) -> None: client = MockMetadataServiceClient() - repository = KGRepository(client, self.get_jsonld_config()) + repository = KGRepository(client) slice_id = KGSliceId("127.0.0.1", 80) query_str = "sparql query" result_parser = KGTupleParser() diff --git a/app/core/test_kg_updater.py b/app/core/test_kg_updater.py index 1d00f1e..1b86ee4 100644 --- a/app/core/test_kg_updater.py +++ b/app/core/test_kg_updater.py @@ -31,7 +31,7 @@ def test_kg_updater(self) -> None: graph, serialized = self.simple_node() slice_id = KGSliceId("127.0.0.1", 80) - slice = DKGSlice(slice_id, graph, 1) + slice = DKGSlice(slice_id, graph, self.get_test_context(), 1) self.queue.put_nowait(slice) graph_str = self.wait_for_graph(slice_id, 5) @@ -49,7 +49,7 @@ def wait_for_graph(self, slice_id: KGSliceId, seconds: int) -> SerializedGraph: raise AssertionError("time is up.") def create_updater(self) -> KGUpdater: - repository = KGRepository(self.client, self.get_jsonld_config()) + repository = KGRepository(self.client) return KGUpdater(self.running_event, self.queue, repository) async def run_updater(self, updater: KGUpdater) -> None: diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 5f0dc66..7551bdc 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -33,8 +33,9 @@ def mock_inputs( settings: QuerySettings, ) -> None: resources = self.load_k8s_snapshot(identity) - + print(resources.versions_info) k8s_client.mock_cluster(resources.cluster) + k8s_client.mock_api_versions(resources.versions_info) k8s_client.mock_daemonsets(resources.daemonsets) k8s_client.mock_deployments(resources.deployments) k8s_client.mock_jobs(resources.jobs) @@ -60,14 +61,15 @@ def get_inputs(self, identity: str) -> SliceInputs: def load_k8s_snapshot(self, snapshot_id: str) -> ResourceSnapshot: return ResourceSnapshot( - 
cluster=self.load_yaml(snapshot_id, "k8s_cluster"), # type: ignore - pods=self.load_yaml(snapshot_id, "k8s_pods"), - nodes=self.load_yaml(snapshot_id, "k8s_nodes"), - deployments=self.load_yaml(snapshot_id, "k8s_deployments"), - jobs=self.load_yaml(snapshot_id, "k8s_jobs"), - statefullsets=self.load_yaml(snapshot_id, "k8s_statefullsets"), - daemonsets=self.load_yaml(snapshot_id, "k8s_daemonsets"), - replicasets=self.load_yaml(snapshot_id, "k8s_replicasets"), + cluster=self.load_yaml(snapshot_id, "k8s_cluster", {}), # type: ignore + versions_info=self.load_yaml(snapshot_id, "k8s_api_versions", {}), # type: ignore + pods=self.load_yaml(snapshot_id, "k8s_pods", []), + nodes=self.load_yaml(snapshot_id, "k8s_nodes", []), + deployments=self.load_yaml(snapshot_id, "k8s_deployments", []), + jobs=self.load_yaml(snapshot_id, "k8s_jobs", []), + statefullsets=self.load_yaml(snapshot_id, "k8s_statefullsets", []), + daemonsets=self.load_yaml(snapshot_id, "k8s_daemonsets", []), + replicasets=self.load_yaml(snapshot_id, "k8s_replicasets", []), ) def load_metric_snapshot(self, snapshot_id: str) -> MetricSnapshot: @@ -76,10 +78,12 @@ def load_metric_snapshot(self, snapshot_id: str) -> MetricSnapshot: node_metrics=self.load_metrics(snapshot_id, "metric_nodes"), ) - def load_yaml(self, snapshot_id: str, file_id: str) -> List[Dict[str, Any]]: + def load_yaml( + self, snapshot_id: str, file_id: str, default: Any + ) -> List[Dict[str, Any]]: file_path = f"{self.SNAPSHOT_ROOT}/{snapshot_id}/{file_id}.yaml" if not os.path.exists(file_path): - return [] + return default # type: ignore return self.safe_load_yaml(file_path) # type: ignore def load_metrics( @@ -134,13 +138,14 @@ def get_test_jsonld_config(self) -> JsonLDConfiguration: JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { "k8s": "http://glaciation-project.eu/model/k8s/", "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", + "cluster": "https://10.14.1.160:6443/", "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", } } return JsonLDConfiguration( contexts, { + # TODO use IRIs IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), diff --git a/app/core/types.py b/app/core/types.py index 727d71f..3085ad0 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -1,4 +1,4 @@ -from typing import List, Set, Tuple +from typing import Any, Dict, List, Set, Tuple import urllib.parse from dataclasses import dataclass, field @@ -27,6 +27,7 @@ def from_host_port(host_and_port: str) -> "KGSliceId": class DKGSlice: slice_id: KGSliceId graph: Graph + context: Dict[str, Any] timestamp: int diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 2305531..8c088af 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -16,7 +16,6 @@ from app.core.metric_repository import MetricRepository from app.core.types import DKGSlice from app.kgexporter_settings import KGExporterSettings -from app.serialize.jsonld_configuration import JsonLDConfiguration from app.util.clock import Clock @@ -36,11 +35,10 @@ def __init__( metadata_client: MetadataServiceClient, k8s_client: K8SClient, metric_store_client: MetricStoreClient, - jsonld_config: JsonLDConfiguration, settings: KGExporterSettings, ): self.settings = settings - kg_repository = KGRepository(metadata_client, jsonld_config) + kg_repository = KGRepository(metadata_client) influxdb_repository = 
MetricRepository(metric_store_client) self.terminated = asyncio.Event() self.queue = AsyncQueue[DKGSlice]() diff --git a/app/kgexporter_context_builder.py b/app/kgexporter_context_builder.py index d742bf3..6573edd 100644 --- a/app/kgexporter_context_builder.py +++ b/app/kgexporter_context_builder.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional, Tuple +from typing import List, Optional, Tuple import argparse from argparse import Namespace @@ -9,13 +9,9 @@ MetadataServiceClientImpl, ) from app.clients.prometheus.prometheus_client import PrometheusClient -from app.k8s_transform.upper_ontology_base import UpperOntologyBase -from app.kg.id_base import IdBase -from app.kg.iri import IRI from app.kgexporter_context import KGExporterContext from app.kgexporter_settings import KGExporterSettings from app.pydantic_yaml import from_yaml -from app.serialize.jsonld_configuration import JsonLDConfiguration from app.util.clock_impl import ClockImpl @@ -62,34 +58,12 @@ def build(self) -> KGExporterContext: metadata_client = MetadataServiceClientImpl(self.settings.metadata) k8s_client = K8SClientImpl(self.settings.k8s) prometheus_client = PrometheusClient(self.settings.prometheus_client) - jsonld_config = self.get_jsonld_config() context = KGExporterContext( clock, metadata_client, k8s_client, prometheus_client, - jsonld_config, self.settings, ) return context - - def get_jsonld_config(self) -> JsonLDConfiguration: - contexts: Dict[IdBase, Dict[str, Any]] = { - JsonLDConfiguration.DEFAULT_CONTEXT_IRI: { - "k8s": "http://glaciation-project.eu/model/k8s/", - "glc": "https://glaciation-heu.github.io/models/reference_model.turtle", - "cluster": "https://127.0.0.1:6443/", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", - } - } - return JsonLDConfiguration( - contexts, - { - IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasuringResource"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), - }, - ) diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 54d7956..326b532 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -38,14 +38,12 @@ def setUp(self) -> None: self.metadata_client = MockMetadataServiceClient() self.k8s_client = MockK8SClient() self.influxdb_client = MockInfluxDBClient() - self.jsonld_config = self.get_test_jsonld_config() self.settings = self.test_kg_exporter_settings() self.context = KGExporterContext( self.clock, self.metadata_client, self.k8s_client, self.influxdb_client, - self.jsonld_config, self.settings, ) From d5938bf9f7164ba35974d05f838b54e8832642c9 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 11:22:50 +0200 Subject: [PATCH 51/61] HHT-669: k8s service --- charts/app/templates/deployment.yaml | 8 ++++++++ charts/app/templates/service.yaml | 15 +++++++++++++++ charts/app/values.yaml | 4 ++++ 3 files changed, 27 insertions(+) create mode 100644 charts/app/templates/service.yaml diff --git a/charts/app/templates/deployment.yaml b/charts/app/templates/deployment.yaml index 20c9657..1bca785 100644 --- a/charts/app/templates/deployment.yaml +++ b/charts/app/templates/deployment.yaml @@ -34,6 +34,14 @@ spec: {{- toYaml .Values.securityContext | nindent 12 }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy 
}} + ports: + - name: http + containerPort: {{ .Values.service.port }} + protocol: TCP + livenessProbe: + {{- toYaml .Values.livenessProbe | nindent 12 }} + readinessProbe: + {{- toYaml .Values.readinessProbe | nindent 12 }} resources: {{- toYaml .Values.resources | nindent 12 }} volumeMounts: diff --git a/charts/app/templates/service.yaml b/charts/app/templates/service.yaml new file mode 100644 index 0000000..a3164fc --- /dev/null +++ b/charts/app/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "app.fullname" . }} + labels: + {{- include "app.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "app.selectorLabels" . | nindent 4 }} diff --git a/charts/app/values.yaml b/charts/app/values.yaml index adb2d4a..c461cfa 100644 --- a/charts/app/values.yaml +++ b/charts/app/values.yaml @@ -24,6 +24,10 @@ securityContext: {} resources: {} +service: + type: ClusterIP + port: 8080 + settings: builder: builder_tick_seconds: 60 From c67be6337a5f2dd274daf45d49d6507c68dfcc41 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 11:38:15 +0200 Subject: [PATCH 52/61] HHT-669: cleanup --- ...st__call_send_to_404_endpoint__raises.yaml | 36 --- ..._send_to_valid_endpoint__no_exception.yaml | 36 --- app/clients/metadata.py | 22 -- .../metadata_service_client_impl.py | 2 +- .../metadata_service_settings.py | 1 - .../mock_metadata_service_client.py | 15 -- app/clients/test_metadata.py | 28 --- app/core/kg_builder.py | 5 - app/core/kg_repository.py | 12 +- app/core/test_snapshot_base.py | 12 +- app/core/types.py | 16 -- app/k8s_transform/pod_transformer.py | 1 - app/kg_exporter.py | 156 ------------ app/settings.py | 6 - app/static/stub_message.jsonld | 36 --- app/test_kgexporter_context.py | 4 +- app/tests/__init__.py | 0 app/tests/test_kg_exporter.py | 238 ------------------ charts/app/values.yaml | 1 - etc/config.yaml | 1 - 20 files changed, 13 insertions(+), 615 deletions(-) delete mode 100644 app/clients/cassettes/test__call_send_to_404_endpoint__raises.yaml delete mode 100644 app/clients/cassettes/test__call_send_to_valid_endpoint__no_exception.yaml delete mode 100644 app/clients/metadata.py delete mode 100644 app/clients/test_metadata.py delete mode 100644 app/kg_exporter.py delete mode 100644 app/settings.py delete mode 100644 app/static/stub_message.jsonld delete mode 100644 app/tests/__init__.py delete mode 100644 app/tests/test_kg_exporter.py diff --git a/app/clients/cassettes/test__call_send_to_404_endpoint__raises.yaml b/app/clients/cassettes/test__call_send_to_404_endpoint__raises.yaml deleted file mode 100644 index 41b1139..0000000 --- a/app/clients/cassettes/test__call_send_to_404_endpoint__raises.yaml +++ /dev/null @@ -1,36 +0,0 @@ -interactions: -- request: - body: '{}' - headers: - accept: - - '*/*' - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '2' - content-type: - - application/json - host: - - metadata-service - user-agent: - - python-httpx/0.27.0 - method: PATCH - uri: http://metadata-service/api/v0/graphfake_url - response: - content: '{"detail":"Not Found"}' - headers: - Connection: - - keep-alive - Content-Length: - - '22' - Content-Type: - - application/json - Date: - - Mon, 08 Apr 2024 09:45:47 GMT - Server: - - nginx/1.18.0 (Ubuntu) - http_version: HTTP/1.1 - status_code: 404 -version: 1 diff --git 
a/app/clients/cassettes/test__call_send_to_valid_endpoint__no_exception.yaml b/app/clients/cassettes/test__call_send_to_valid_endpoint__no_exception.yaml deleted file mode 100644 index 9c3468d..0000000 --- a/app/clients/cassettes/test__call_send_to_valid_endpoint__no_exception.yaml +++ /dev/null @@ -1,36 +0,0 @@ -interactions: -- request: - body: '{}' - headers: - accept: - - '*/*' - accept-encoding: - - gzip, deflate - connection: - - keep-alive - content-length: - - '2' - content-type: - - application/json - host: - - metadata-service - user-agent: - - python-httpx/0.27.0 - method: PATCH - uri: http://metadata-service/api/v0/graph - response: - content: '"Success"' - headers: - Connection: - - keep-alive - Content-Length: - - '9' - Content-Type: - - application/json - Date: - - Mon, 08 Apr 2024 09:45:47 GMT - Server: - - nginx/1.18.0 (Ubuntu) - http_version: HTTP/1.1 - status_code: 200 -version: 1 diff --git a/app/clients/metadata.py b/app/clients/metadata.py deleted file mode 100644 index 124fcd9..0000000 --- a/app/clients/metadata.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Client for Metadata Service""" - -import httpx -from httpx import HTTPError - -from app.settings import Settings - - -class ClientError(Exception): - pass - - -def send_to_metadata_service(message: str, settings: Settings) -> None: - """Send graph data to Metadata Service""" - try: - httpx.patch( - settings.metadata_service_url, - content=message, - headers=[("Content-Type", "application/json")], - ).raise_for_status() - except HTTPError as e: - raise ClientError(e.args[0]) from e diff --git a/app/clients/metadata_service/metadata_service_client_impl.py b/app/clients/metadata_service/metadata_service_client_impl.py index 17c375a..6eaed98 100644 --- a/app/clients/metadata_service/metadata_service_client_impl.py +++ b/app/clients/metadata_service/metadata_service_client_impl.py @@ -33,7 +33,7 @@ async def query(self, host_and_port: str, sparql: str) -> List[Triple]: headers=[("Content-Type", "application/json")], ) response.raise_for_status() - # TODO parse response + # TODO parse response when it is clear what Metadata Service query API is return [] except HTTPError as e: raise ClientError(e.args[0]) from e diff --git a/app/clients/metadata_service/metadata_service_settings.py b/app/clients/metadata_service/metadata_service_settings.py index bade1e7..7c2413e 100644 --- a/app/clients/metadata_service/metadata_service_settings.py +++ b/app/clients/metadata_service/metadata_service_settings.py @@ -3,5 +3,4 @@ class MetadataServiceSettings(BaseSettings): metadata_service_url: str = "metadata-service" - metadata_service_push_period_sec: int = 60 use_single_url: bool = True diff --git a/app/clients/metadata_service/mock_metadata_service_client.py b/app/clients/metadata_service/mock_metadata_service_client.py index 987713e..88e817a 100644 --- a/app/clients/metadata_service/mock_metadata_service_client.py +++ b/app/clients/metadata_service/mock_metadata_service_client.py @@ -74,22 +74,7 @@ async def insert(self, host_and_port: HostId, message: SerializedGraph) -> None: self.hosts[host_and_port] = HostInteractions() self.hosts[host_and_port].add_insert(message) - # TODO remove def wait_for_inserts( - self, seconds: int, count: int - ) -> List[Tuple[HostId, SerializedGraph]]: - start = datetime.datetime.now() - result = [] - while start + datetime.timedelta(seconds=seconds) > datetime.datetime.now(): - graphs = self.take_all_inserts() - print(len(graphs)) - result.extend(graphs) - if len(graphs) == count: - return graphs - 
asyncio.run(asyncio.sleep(0.5)) - raise AssertionError("time is up.") - - def wait_for_inserts2( self, runner: asyncio.Runner, seconds: int, count: int ) -> List[Tuple[HostId, SerializedGraph]]: start = datetime.datetime.now() diff --git a/app/clients/test_metadata.py b/app/clients/test_metadata.py deleted file mode 100644 index 3d9d1e2..0000000 --- a/app/clients/test_metadata.py +++ /dev/null @@ -1,28 +0,0 @@ -import pytest - -from app.clients.metadata import ClientError, send_to_metadata_service -from app.settings import Settings - - -def test__call_send_to_invalid_host__raises() -> None: - settings = Settings(metadata_service_url="http://example.fake/api") - with pytest.raises(ClientError) as e: - send_to_metadata_service("{}", settings) - assert isinstance(e.value, ClientError) - - -@pytest.mark.vcr() -def test__call_send_to_valid_endpoint__no_exception() -> None: - settings = Settings() - send_to_metadata_service("{}", settings) - - -@pytest.mark.vcr() -def test__call_send_to_404_endpoint__raises() -> None: - settings = Settings() - settings.metadata_service_url = settings.metadata_service_url + "fake_url" - - with pytest.raises(ClientError, match="404 Not Found") as e: - send_to_metadata_service("{}", settings) - - assert isinstance(e.value, ClientError) diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 7b5ea93..883cbe7 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -104,21 +104,16 @@ async def run_cycle(self, now_seconds: int) -> None: cluster_snapshot, pod_metrics, node_metrics, - workload_metrics, ) = await asyncio.gather( self.k8s_client.fetch_snapshot(), self.influxdb_repository.query_many(now, self.settings.queries.pod_queries), self.influxdb_repository.query_many( now, self.settings.queries.node_queries ), - self.influxdb_repository.query_many( - now, self.settings.queries.workload_queries - ), ) metric_snapshot = MetricSnapshot( list(zip(self.settings.queries.pod_queries, pod_metrics)), list(zip(self.settings.queries.node_queries, node_metrics)), - list(zip(self.settings.queries.workload_queries, workload_metrics)), ) logger.debug("Cluster snapshot: {size}", size=len(cluster_snapshot.cluster)) logger.debug("Nodes: {size}", size=len(cluster_snapshot.nodes)) diff --git a/app/core/kg_repository.py b/app/core/kg_repository.py index a489621..e42e801 100644 --- a/app/core/kg_repository.py +++ b/app/core/kg_repository.py @@ -7,7 +7,6 @@ from app.core.types import KGSliceId from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph -from app.kg.iri import IRI from app.serialize.jsonld_configuration import JsonLDConfiguration from app.serialize.jsonld_serializer import JsonLDSerialializer @@ -42,11 +41,10 @@ def get_jsonld_config(self, context: Dict[str, Any]) -> JsonLDConfiguration: return JsonLDConfiguration( {JsonLDConfiguration.DEFAULT_CONTEXT_IRI: context}, { - # TODO use IRIs from UpperOntologyBase - IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasuringResource"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), + UpperOntologyBase.WORK_PRODUCING_RESOURCE, + UpperOntologyBase.ASPECT, + UpperOntologyBase.MEASUREMENT_PROPERTY, + UpperOntologyBase.MEASURING_RESOURCE, + UpperOntologyBase.MEASUREMENT_UNIT, }, ) diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 7551bdc..a749108 
100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -17,7 +17,6 @@ from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.id_base import IdBase -from app.kg.iri import IRI from app.serialize.jsonld_configuration import JsonLDConfiguration from app.serialize.jsonld_serializer import JsonLDSerialializer @@ -145,11 +144,10 @@ def get_test_jsonld_config(self) -> JsonLDConfiguration: return JsonLDConfiguration( contexts, { - # TODO use IRIs - IRI(UpperOntologyBase.GLACIATION_PREFIX, "WorkProducingResource"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "Aspect"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementProperty"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasuringResource"), - IRI(UpperOntologyBase.GLACIATION_PREFIX, "MeasurementUnit"), + UpperOntologyBase.WORK_PRODUCING_RESOURCE, + UpperOntologyBase.ASPECT, + UpperOntologyBase.MEASUREMENT_PROPERTY, + UpperOntologyBase.MEASURING_RESOURCE, + UpperOntologyBase.MEASUREMENT_UNIT, }, ) diff --git a/app/core/types.py b/app/core/types.py index 3085ad0..7873f56 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -31,26 +31,10 @@ class DKGSlice: timestamp: int -# TODO remove -@dataclass -class Metric: - identifier: str - kind: str - measurement_name: str - metric_name: str - value: float - timestamp: int - source: str - - @dataclass class MetricSnapshot: pod_metrics: List[Tuple[MetricQuery, MetricValue]] = field(default_factory=list) node_metrics: List[Tuple[MetricQuery, MetricValue]] = field(default_factory=list) - # TODO remove workload_metrics - workload_metrics: List[Tuple[MetricQuery, MetricValue]] = field( - default_factory=list - ) def get_metric_names(self) -> Set[str]: names: Set[str] = set() diff --git a/app/k8s_transform/pod_transformer.py b/app/k8s_transform/pod_transformer.py index 89f9637..f8f7773 100644 --- a/app/k8s_transform/pod_transformer.py +++ b/app/k8s_transform/pod_transformer.py @@ -45,7 +45,6 @@ def add_pod_status(self, pod_id: IRI) -> None: start_time = self.get_opt_str_value(["status", "startTime"]) status = self.get_opt_str_value(["status", "phase"]) if status: - # TODO if start_time is None self.add_status(status_id, status, start_time or "", None) def add_containers_resources(self, pod_id: IRI, scheduler_name: str) -> None: diff --git a/app/kg_exporter.py b/app/kg_exporter.py deleted file mode 100644 index e633374..0000000 --- a/app/kg_exporter.py +++ /dev/null @@ -1,156 +0,0 @@ -from typing import Any - -import argparse -import logging -import signal -import threading -from abc import ABC, abstractmethod -from datetime import datetime, timedelta -from time import sleep - -from kubernetes import client, config, watch -from kubernetes.client.models.v1_deployment_list import V1DeploymentList -from kubernetes.client.models.v1_job_list import V1JobList -from kubernetes.client.models.v1_stateful_set_list import V1StatefulSetList - -from app.clients.metadata import send_to_metadata_service -from app.settings import Settings - - -class Resource(ABC): - def __init__(self, api: client.AppsV1Api | client.BatchV1Api) -> None: - self._api = api - - @abstractmethod - def get_list( - self, - *args: Any, - **kwargs: Any, - ) -> V1DeploymentList | V1StatefulSetList | V1JobList: - raise NotImplementedError - - -class DeploymentResource(Resource): - def __init__(self, api: client.AppsV1Api) -> None: - self._api = api - - def get_list(self, *args: Any, **kwargs: Any) -> V1DeploymentList: - return 
self._api.list_namespaced_deployment(*args, **kwargs) - - -class StatefulSetResource(Resource): - def __init__(self, api: client.AppsV1Api) -> None: - self._api = api - - def get_list(self, *args: Any, **kwargs: Any) -> V1StatefulSetList: - return self._api.list_namespaced_stateful_set(*args, **kwargs) - - -class JobResource(Resource): - def __init__(self, api: client.BatchV1Api) -> None: - self._api = api - - def get_list(self, *args: Any, **kwargs: Any) -> V1JobList: - return self._api.list_namespaced_job(*args, **kwargs) - - -class KubernetesWatcher: - namespace: str = "default" - - def __init__(self, resources: dict[str, Resource], logger: logging.Logger) -> None: - self._resources = resources - self._logger = logger - - def _watch_resource(self, resource_name: str, resource: Resource) -> None: - try: - for event in watch.Watch().stream( - resource.get_list, namespace=self.namespace, watch=True - ): - obj = event["object"] - metadata = obj.get("metadata") - if metadata: - name = metadata.get("name") - version = metadata.get("resourceVersion") - if name and version: - self._logger.info( - f"{event['type']}: resource={resource_name}; " - f"name={name}, version={version}" - ) - except client.ApiException as e: - self._logger.exception(e) - - def watch_resources(self, background: bool = False) -> None: - self._logger.info("Events:") - - threads = [] - for resource_name, resource in self._resources.items(): - thread = threading.Thread( - target=self._watch_resource, args=(resource_name, resource) - ) - threads.append(thread) - thread.start() - - if not background: - for thread in threads: - thread.join() - - -def run_watcher( - incluster: bool, logger: logging.Logger, background: bool = False -) -> None: - if incluster: - config.load_incluster_config() - else: - config.load_kube_config() - - apps_api = client.AppsV1Api() - batch_api = client.BatchV1Api() - - resources = { - "Deployment": DeploymentResource(apps_api), - "StatefulSet": StatefulSetResource(apps_api), - "Job": JobResource(batch_api), - } - - watcher = KubernetesWatcher(resources, logger) - watcher.watch_resources(background=background) - - -def run_periodic_push(settings: Settings) -> None: - with open("app/static/stub_message.jsonld") as f: - stub_message = f.read() - last_call = datetime.now() - send_to_metadata_service(stub_message, settings) - while True: - period = timedelta(seconds=settings.metadata_service_push_period_sec) - if datetime.now() - last_call > period: - last_call = datetime.now() - send_to_metadata_service(stub_message, settings) - else: - sleep(1) - - -def main() -> None: - parser = argparse.ArgumentParser(description="Kubernetes watcher service") - parser.add_argument( - "--incluster", - dest="incluster", - action="store_true", - help="Load a Kubernetes config from within a cluster", - ) - args = parser.parse_args() - - signal.signal(signal.SIGINT, signal.SIG_DFL) - - logger = logging.getLogger() - logger.setLevel(logging.INFO) - console_handler = logging.StreamHandler() - logger.addHandler(console_handler) - settings = Settings() - - run_watcher(args.incluster, logger, background=True) - run_periodic_push(settings) - - -if __name__ == "__main__": - main() diff --git a/app/settings.py b/app/settings.py deleted file mode 100644 index 29bf579..0000000 --- a/app/settings.py +++ /dev/null @@ -1,6 +0,0 @@ -from pydantic_settings import BaseSettings - - -class Settings(BaseSettings): - metadata_service_url: str = "http://metadata-service/api/v0/graph" - metadata_service_push_period_sec: int = 60 diff --git 
a/app/static/stub_message.jsonld b/app/static/stub_message.jsonld deleted file mode 100644 index 7607457..0000000 --- a/app/static/stub_message.jsonld +++ /dev/null @@ -1,36 +0,0 @@ -{ - "@context": { - "gla": "http://glaciation-project.eu/model/", - "cluster": "https://127.0.0.1:6443/", - "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#" - }, - "@graph": [ - { - "@id": "cluster:tenant1-pool-0-1", - "@type": "gla:Pod", - "gla:has-annotation": { - "@set": [ - "min.io/revision:0" - ] - }, - "gla:has-label": { - "@set": [ - "apps.kubernetes.io/pod-index:1", - "controller-revision-hash:tenant1-pool-0-6b769f799d", - "statefulset.kubernetes.io/pod-name:tenant1-pool-0-1", - "v1.min.io/console:tenant1-console", - "v1.min.io/pool:pool-0", - "v1.min.io/tenant:tenant1" - ] - }, - "gla:is-scheduled-by": "default-scheduler", - "gla:pod-phase": "Pending", - "gla:qos-class": "Burstable" - }, - { - "@id": "cluster:tenant1-pool-0.9dcbaee2-a251-46f3-ab61-1a44934ae1f2", - "@type": "gla:StatefulSet", - "gla:refers-to": "cluster:tenant1-pool-0-1" - } - ] -} diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 326b532..2270787 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -56,7 +56,7 @@ def test_end_to_end_minimal(self) -> None: ) self.context.start() - inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 5, 1) + inserts = self.metadata_client.wait_for_inserts(self.context.runner, 5, 1) self.assert_graphs("minimal", inserts) @@ -72,7 +72,7 @@ def test_end_to_end_multinode(self) -> None: ) self.context.start() - inserts = self.metadata_client.wait_for_inserts2(self.context.runner, 5, 2) + inserts = self.metadata_client.wait_for_inserts(self.context.runner, 5, 2) self.assert_graphs("multinode", inserts) diff --git a/app/tests/__init__.py b/app/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/tests/test_kg_exporter.py b/app/tests/test_kg_exporter.py deleted file mode 100644 index 3fab527..0000000 --- a/app/tests/test_kg_exporter.py +++ /dev/null @@ -1,238 +0,0 @@ -from typing import cast - -import sys -from unittest.mock import MagicMock, call, patch - -import pytest -from kubernetes import client -from kubernetes.client.models.v1_deployment_list import V1DeploymentList -from kubernetes.client.models.v1_job_list import V1JobList -from kubernetes.client.models.v1_stateful_set_list import V1StatefulSetList -from pytest_mock import MockerFixture - -from app.kg_exporter import ( - DeploymentResource, - JobResource, - KubernetesWatcher, - Resource, - StatefulSetResource, - main, - run_watcher, -) -from app.settings import Settings - - -@pytest.fixture -def logger() -> MagicMock: - return MagicMock() - - -@pytest.fixture -def api() -> MagicMock: - return MagicMock() - - -@pytest.fixture -def resources() -> dict[str, MagicMock]: - return { - "Deployment": MagicMock(spec=Resource), - "StatefulSet": MagicMock(spec=Resource), - "Job": MagicMock(spec=Resource), - } - - -@pytest.fixture -def settings(mocker: MockerFixture) -> Settings: - settings = Settings() - mocker.patch("app.kg_exporter.Settings", return_value=settings) - return settings - - -class TestDeploymentResource: - def test_get_list(self, api: MagicMock) -> None: - mock_response = V1DeploymentList(items=[{"metadata": {"name": "test_name"}}]) - api.list_namespaced_deployment.return_value = mock_response - - resource = DeploymentResource(api) - result = resource.get_list(namespace="test_namespace", watch=True) - - assert result == mock_response 
- api.list_namespaced_deployment.assert_called_once_with( - namespace="test_namespace", watch=True - ) - - -class TestStatefulSetResource: - def test_get_list(self, api: MagicMock) -> None: - mock_response = V1StatefulSetList(items=[{"metadata": {"name": "test_name"}}]) - api.list_namespaced_stateful_set.return_value = mock_response - - resource = StatefulSetResource(api) - result = resource.get_list(namespace="test_namespace", watch=True) - - assert result == mock_response - api.list_namespaced_stateful_set.assert_called_once_with( - namespace="test_namespace", watch=True - ) - - -class TestJobResource: - def test_get_list(self, api: MagicMock) -> None: - mock_response = V1JobList(items=[{"metadata": {"name": "test_name"}}]) - api.list_namespaced_job.return_value = mock_response - - resource = JobResource(api) - result = resource.get_list(namespace="test_namespace", watch=True) - - assert result == mock_response - api.list_namespaced_job.assert_called_once_with( - namespace="test_namespace", watch=True - ) - - -class TestKubernetesWatcher: - def test_watch_resource(self, resources: MagicMock, logger: MagicMock) -> None: - mock_watch = MagicMock() - mock_watch().stream.return_value = [ - { - "type": "ADDED", - "object": {"metadata": {"name": "test_name", "resourceVersion": "1"}}, - }, - { - "type": "MODIFIED", - "object": {"metadata": {"name": "test_name", "resourceVersion": "2"}}, - }, - { - "type": "DELETED", - "object": {"metadata": {"name": "test_name", "resourceVersion": "3"}}, - }, - ] - - watcher = KubernetesWatcher(resources, logger) - with patch("kubernetes.watch.Watch", mock_watch): - watcher._watch_resource("Deployment", resources["Deployment"]) - - mock_watch().stream.assert_called_once_with( - resources["Deployment"].get_list, - namespace=watcher.namespace, - watch=True, - ) - - assert logger.info.call_count == 3 - assert logger.info.call_args_list == [ - call("ADDED: resource=Deployment; name=test_name, version=1"), - call("MODIFIED: resource=Deployment; name=test_name, version=2"), - call("DELETED: resource=Deployment; name=test_name, version=3"), - ] - - def test_watch_resources_exception( - self, resources: MagicMock, logger: MagicMock - ) -> None: - watcher = KubernetesWatcher(resources, logger) - with patch("kubernetes.watch.Watch", side_effect=client.ApiException()): - watcher._watch_resource("Deployment", resources["Deployment"]) - - logger.exception.assert_called_once() - - @patch("app.kg_exporter.KubernetesWatcher._watch_resource") - def test_watch_resources( - self, - mock_watch_resource: MagicMock, - resources: MagicMock, - logger: MagicMock, - ) -> None: - watcher = KubernetesWatcher(resources, logger) - watcher.watch_resources() - - logger.info.assert_any_call("Events:") - cast(MagicMock, watcher._watch_resource).assert_has_calls( - [ - call(resource_name, resource) - for resource_name, resource in resources.items() - ] - ) - - -class TestRunWatcher: - @patch("kubernetes.config.load_incluster_config") - @patch("app.kg_exporter.KubernetesWatcher") - def test_incluster( - self, - mock_watcher: MagicMock, - mock_load_incluster_config: MagicMock, - logger: MagicMock, - ) -> None: - run_watcher(incluster=True, logger=logger) - - mock_load_incluster_config.assert_called_once() - - mock_watcher.assert_called_once() - args, kwargs = mock_watcher.call_args - resources = args[0] - - assert args[1] == logger - - assert "Deployment" in resources - assert "StatefulSet" in resources - assert "Job" in resources - - assert isinstance(resources["Deployment"], DeploymentResource) 
- assert isinstance(resources["StatefulSet"], StatefulSetResource) - assert isinstance(resources["Job"], JobResource) - - mock_watcher.return_value.watch_resources.assert_called_once() - - @patch("kubernetes.config.load_kube_config") - @patch("app.kg_exporter.KubernetesWatcher") - def test_kube( - self, - mock_watcher: MagicMock, - mock_load_kube_config: MagicMock, - logger: MagicMock, - ) -> None: - run_watcher(incluster=False, logger=logger) - - mock_load_kube_config.assert_called_once() - - mock_watcher.assert_called_once() - mock_watcher.return_value.watch_resources.assert_called_once() - - -class TestMain: - @patch.object(sys, "argv", ["kg_exporter.py"]) - @patch("logging.getLogger") - @patch("app.kg_exporter.run_watcher") - @patch("app.kg_exporter.run_periodic_push") - def test_common( - self, - mock_run_periodic_push: MagicMock, - mock_run_watcher: MagicMock, - mock_get_logger: MagicMock, - settings: Settings, - ) -> None: - main() - mock_run_watcher.assert_called_once_with( - False, - mock_get_logger.return_value, - background=True, - ) - mock_run_periodic_push.assert_called_once_with(settings) - - @patch.object(sys, "argv", ["kg_exporter.py", "--incluster"]) - @patch("logging.getLogger") - @patch("app.kg_exporter.run_watcher") - @patch("app.kg_exporter.run_periodic_push") - def test_incluster( - self, - mock_run_periodic_push: MagicMock, - mock_run_watcher: MagicMock, - mock_get_logger: MagicMock, - settings: Settings, - ) -> None: - main() - mock_run_watcher.assert_called_once_with( - True, - mock_get_logger.return_value, - background=True, - ) - mock_run_periodic_push.assert_called_once_with(settings) diff --git a/charts/app/values.yaml b/charts/app/values.yaml index c461cfa..0f22bcf 100644 --- a/charts/app/values.yaml +++ b/charts/app/values.yaml @@ -46,7 +46,6 @@ settings: k8s: in_cluster: true metadata: - metadata_service_push_period_sec: 60 metadata_service_url: metadata-service prometheus: endpoint_port: 8080 diff --git a/etc/config.yaml b/etc/config.yaml index cf781df..5d53b2c 100644 --- a/etc/config.yaml +++ b/etc/config.yaml @@ -15,7 +15,6 @@ influxdb: k8s: in_cluster: false metadata: - metadata_service_push_period_sec: 60 metadata_service_url: metadata.integration use_single_url: true prometheus: From b4a91444b5d6667cee16b4a810c40f0dda5f8396 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 11:54:45 +0200 Subject: [PATCH 53/61] HHT-669: moving metrics transformers --- app/core/kg_slice_assembler.py | 4 ++-- app/{metric_transform => transform}/__init__.py | 0 .../metrics}/__fixture__/node.jsonld | 0 .../metrics}/__fixture__/node.turtle | 0 .../metrics}/__fixture__/pod1.jsonld | 0 .../metrics}/__fixture__/pod1.turtle | 0 app/transform/metrics/__init__.py | 0 .../metrics}/metric_transformer.py | 0 .../metrics}/node_metric_transformer.py | 2 +- .../metrics}/pod_metric_transformer.py | 2 +- .../metrics}/test_base.py | 10 ++++++---- .../metrics}/test_node_metric_transformer.py | 4 ++-- .../metrics}/test_pod_metric_transformer.py | 4 ++-- 13 files changed, 14 insertions(+), 12 deletions(-) rename app/{metric_transform => transform}/__init__.py (100%) rename app/{metric_transform => transform/metrics}/__fixture__/node.jsonld (100%) rename app/{metric_transform => transform/metrics}/__fixture__/node.turtle (100%) rename app/{metric_transform => transform/metrics}/__fixture__/pod1.jsonld (100%) rename app/{metric_transform => transform/metrics}/__fixture__/pod1.turtle (100%) create mode 100644 app/transform/metrics/__init__.py rename app/{metric_transform => 
transform/metrics}/metric_transformer.py (100%) rename app/{metric_transform => transform/metrics}/node_metric_transformer.py (95%) rename app/{metric_transform => transform/metrics}/pod_metric_transformer.py (95%) rename app/{metric_transform => transform/metrics}/test_base.py (84%) rename app/{metric_transform => transform/metrics}/test_node_metric_transformer.py (94%) rename app/{metric_transform => transform/metrics}/test_pod_metric_transformer.py (94%) diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py index 016a883..ab85531 100644 --- a/app/core/kg_slice_assembler.py +++ b/app/core/kg_slice_assembler.py @@ -12,8 +12,8 @@ from app.k8s_transform.workload_transformer import WorkloadToRDFTransformer from app.kg.graph import Graph from app.kg.inmemory_graph import InMemoryGraph -from app.metric_transform.node_metric_transformer import NodeMetricToGraphTransformer -from app.metric_transform.pod_metric_transformer import PodMetricToGraphTransformer +from app.transform.metrics.node_metric_transformer import NodeMetricToGraphTransformer +from app.transform.metrics.pod_metric_transformer import PodMetricToGraphTransformer class KGSliceAssembler: diff --git a/app/metric_transform/__init__.py b/app/transform/__init__.py similarity index 100% rename from app/metric_transform/__init__.py rename to app/transform/__init__.py diff --git a/app/metric_transform/__fixture__/node.jsonld b/app/transform/metrics/__fixture__/node.jsonld similarity index 100% rename from app/metric_transform/__fixture__/node.jsonld rename to app/transform/metrics/__fixture__/node.jsonld diff --git a/app/metric_transform/__fixture__/node.turtle b/app/transform/metrics/__fixture__/node.turtle similarity index 100% rename from app/metric_transform/__fixture__/node.turtle rename to app/transform/metrics/__fixture__/node.turtle diff --git a/app/metric_transform/__fixture__/pod1.jsonld b/app/transform/metrics/__fixture__/pod1.jsonld similarity index 100% rename from app/metric_transform/__fixture__/pod1.jsonld rename to app/transform/metrics/__fixture__/pod1.jsonld diff --git a/app/metric_transform/__fixture__/pod1.turtle b/app/transform/metrics/__fixture__/pod1.turtle similarity index 100% rename from app/metric_transform/__fixture__/pod1.turtle rename to app/transform/metrics/__fixture__/pod1.turtle diff --git a/app/transform/metrics/__init__.py b/app/transform/metrics/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/metric_transform/metric_transformer.py b/app/transform/metrics/metric_transformer.py similarity index 100% rename from app/metric_transform/metric_transformer.py rename to app/transform/metrics/metric_transformer.py diff --git a/app/metric_transform/node_metric_transformer.py b/app/transform/metrics/node_metric_transformer.py similarity index 95% rename from app/metric_transform/node_metric_transformer.py rename to app/transform/metrics/node_metric_transformer.py index e7c20b2..f8db030 100644 --- a/app/metric_transform/node_metric_transformer.py +++ b/app/transform/metrics/node_metric_transformer.py @@ -7,7 +7,7 @@ from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI -from app.metric_transform.metric_transformer import MetricToGraphTransformerBase +from app.transform.metrics.metric_transformer import MetricToGraphTransformerBase class NodeMetricToGraphTransformer(MetricToGraphTransformerBase, UpperOntologyBase): diff --git a/app/metric_transform/pod_metric_transformer.py 
b/app/transform/metrics/pod_metric_transformer.py similarity index 95% rename from app/metric_transform/pod_metric_transformer.py rename to app/transform/metrics/pod_metric_transformer.py index ec146ec..8352e88 100644 --- a/app/metric_transform/pod_metric_transformer.py +++ b/app/transform/metrics/pod_metric_transformer.py @@ -7,7 +7,7 @@ from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI -from app.metric_transform.metric_transformer import MetricToGraphTransformerBase +from app.transform.metrics.metric_transformer import MetricToGraphTransformerBase class PodMetricToGraphTransformer(MetricToGraphTransformerBase, UpperOntologyBase): diff --git a/app/metric_transform/test_base.py b/app/transform/metrics/test_base.py similarity index 84% rename from app/metric_transform/test_base.py rename to app/transform/metrics/test_base.py index 8f664df..14a79ec 100644 --- a/app/metric_transform/test_base.py +++ b/app/transform/metrics/test_base.py @@ -11,20 +11,22 @@ class MetricTransformTestBase(TestCase): + BASE_PATH = "app/transform/metrics/__fixture__/" + def load_turtle(self, name: str) -> str: - with FileIO(f"app/metric_transform/__fixture__/{name}.turtle") as f: + with FileIO(f"{self.BASE_PATH}/{name}.turtle") as f: return f.readall().decode("utf-8") def load_jsonld(self, name: str) -> str: - with FileIO(f"app/metric_transform/__fixture__/{name}.jsonld") as f: + with FileIO(f"{self.BASE_PATH}/{name}.jsonld") as f: return json.load(f) # type: ignore def load_json(self, name: str) -> Dict[str, Any]: - with FileIO(f"app/metric_transform/__fixture__/{name}.json") as f: + with FileIO(f"{self.BASE_PATH}/{name}.json") as f: return json.load(f) # type: ignore def load_json_list(self, name: str) -> List[Dict[str, Any]]: - with FileIO(f"app/metric_transform/__fixture__/{name}.json") as f: + with FileIO(f"{self.BASE_PATH}/{name}.json") as f: return json.load(f) # type: ignore def get_jsonld_config(self) -> JsonLDConfiguration: diff --git a/app/metric_transform/test_node_metric_transformer.py b/app/transform/metrics/test_node_metric_transformer.py similarity index 94% rename from app/metric_transform/test_node_metric_transformer.py rename to app/transform/metrics/test_node_metric_transformer.py index 8219f09..5b7be76 100644 --- a/app/metric_transform/test_node_metric_transformer.py +++ b/app/transform/metrics/test_node_metric_transformer.py @@ -5,10 +5,10 @@ from app.core.metric_value import MetricValue from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph -from app.metric_transform.node_metric_transformer import NodeMetricToGraphTransformer -from app.metric_transform.test_base import MetricTransformTestBase from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.metrics.node_metric_transformer import NodeMetricToGraphTransformer +from app.transform.metrics.test_base import MetricTransformTestBase class NodeMetricToGraphTransformerTest(MetricTransformTestBase): diff --git a/app/metric_transform/test_pod_metric_transformer.py b/app/transform/metrics/test_pod_metric_transformer.py similarity index 94% rename from app/metric_transform/test_pod_metric_transformer.py rename to app/transform/metrics/test_pod_metric_transformer.py index 0a5ee47..b18b743 100644 --- a/app/metric_transform/test_pod_metric_transformer.py +++ b/app/transform/metrics/test_pod_metric_transformer.py @@ -5,10 
+5,10 @@ from app.core.metric_value import MetricValue from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph -from app.metric_transform.pod_metric_transformer import PodMetricToGraphTransformer -from app.metric_transform.test_base import MetricTransformTestBase from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.metrics.pod_metric_transformer import PodMetricToGraphTransformer +from app.transform.metrics.test_base import MetricTransformTestBase class PodMetricToGraphTransformerTest(MetricTransformTestBase): From 82a98e0f2ff7bdf15d86e83df3ef3ef6a234ba4f Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 12:05:35 +0200 Subject: [PATCH 54/61] HHT-669: moving k8s transformers --- app/core/kg_repository.py | 2 +- app/core/kg_slice_assembler.py | 12 ++++++------ app/core/test_graph_fixture.py | 2 +- app/core/test_snapshot_base.py | 2 +- .../k8s}/__fixture__/cluster.jsonld | 0 .../k8s}/__fixture__/cluster.kubeadm-config.json | 0 .../k8s}/__fixture__/cluster.nodes.json | 0 .../k8s}/__fixture__/cluster.turtle | 0 .../k8s}/__fixture__/deployment.json | 0 .../k8s}/__fixture__/deployment.jsonld | 0 .../k8s}/__fixture__/deployment.turtle | 0 .../k8s}/__fixture__/master_node.json | 0 .../k8s}/__fixture__/master_node.jsonld | 0 .../k8s}/__fixture__/master_node.turtle | 0 .../k8s}/__fixture__/pod1.json | 0 .../k8s}/__fixture__/pod1.jsonld | 0 .../k8s}/__fixture__/pod1.turtle | 0 .../k8s}/__fixture__/pod2.json | 0 .../k8s}/__fixture__/pod2.jsonld | 0 .../k8s}/__fixture__/pod2.turtle | 0 .../k8s}/__fixture__/pod3.json | 0 .../k8s}/__fixture__/pod3.jsonld | 0 .../k8s}/__fixture__/pod3.turtle | 0 .../k8s}/__fixture__/replicaset.json | 0 .../k8s}/__fixture__/replicaset.jsonld | 0 .../k8s}/__fixture__/replicaset.turtle | 0 .../k8s}/__fixture__/statefulset.json | 0 .../k8s}/__fixture__/statefulset.jsonld | 0 .../k8s}/__fixture__/statefulset.turtle | 0 .../k8s}/__fixture__/worker_node.json | 0 .../k8s}/__fixture__/worker_node.jsonld | 0 .../k8s}/__fixture__/worker_node.turtle | 0 app/{k8s_transform => transform/k8s}/__init__.py | 0 .../k8s}/cluster_transformer.py | 6 +++--- .../k8s}/node_transformer.py | 6 +++--- .../k8s}/pod_transformer.py | 6 +++--- app/{k8s_transform => transform/k8s}/test_base.py | 12 +++++++----- .../k8s}/test_cluster_transformer.py | 6 +++--- .../k8s}/test_node_transformer.py | 6 +++--- .../k8s}/test_pod_transformer.py | 6 +++--- .../k8s}/test_workload_transformer.py | 6 +++--- .../k8s}/transformation_context.py | 0 .../k8s}/transformer_base.py | 4 ++-- .../k8s}/upper_ontology_base.py | 0 .../k8s}/workload_transformer.py | 6 +++--- app/transform/metrics/metric_transformer.py | 2 +- app/transform/metrics/node_metric_transformer.py | 6 +++--- app/transform/metrics/pod_metric_transformer.py | 6 +++--- app/transform/metrics/test_base.py | 2 +- .../metrics/test_node_metric_transformer.py | 2 +- app/transform/metrics/test_pod_metric_transformer.py | 2 +- 51 files changed, 52 insertions(+), 50 deletions(-) rename app/{k8s_transform => transform/k8s}/__fixture__/cluster.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/cluster.kubeadm-config.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/cluster.nodes.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/cluster.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/deployment.json (100%) rename 
app/{k8s_transform => transform/k8s}/__fixture__/deployment.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/deployment.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/master_node.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/master_node.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/master_node.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod1.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod1.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod1.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod2.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod2.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod2.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod3.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod3.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/pod3.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/replicaset.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/replicaset.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/replicaset.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/statefulset.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/statefulset.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/statefulset.turtle (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/worker_node.json (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/worker_node.jsonld (100%) rename app/{k8s_transform => transform/k8s}/__fixture__/worker_node.turtle (100%) rename app/{k8s_transform => transform/k8s}/__init__.py (100%) rename app/{k8s_transform => transform/k8s}/cluster_transformer.py (89%) rename app/{k8s_transform => transform/k8s}/node_transformer.py (96%) rename app/{k8s_transform => transform/k8s}/pod_transformer.py (95%) rename app/{k8s_transform => transform/k8s}/test_base.py (82%) rename app/{k8s_transform => transform/k8s}/test_cluster_transformer.py (89%) rename app/{k8s_transform => transform/k8s}/test_node_transformer.py (89%) rename app/{k8s_transform => transform/k8s}/test_pod_transformer.py (89%) rename app/{k8s_transform => transform/k8s}/test_workload_transformer.py (89%) rename app/{k8s_transform => transform/k8s}/transformation_context.py (100%) rename app/{k8s_transform => transform/k8s}/transformer_base.py (98%) rename app/{k8s_transform => transform/k8s}/upper_ontology_base.py (100%) rename app/{k8s_transform => transform/k8s}/workload_transformer.py (98%) diff --git a/app/core/kg_repository.py b/app/core/kg_repository.py index e42e801..86d2000 100644 --- a/app/core/kg_repository.py +++ b/app/core/kg_repository.py @@ -5,10 +5,10 @@ from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.kg_result_parser import KGResultParser from app.core.types import KGSliceId -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.serialize.jsonld_configuration import JsonLDConfiguration from app.serialize.jsonld_serializer import JsonLDSerialializer +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class KGRepository: diff --git a/app/core/kg_slice_assembler.py b/app/core/kg_slice_assembler.py index ab85531..6140ba4 100644 --- 
a/app/core/kg_slice_assembler.py +++ b/app/core/kg_slice_assembler.py @@ -4,14 +4,14 @@ from app.clients.k8s.k8s_client import ResourceSnapshot from app.core.types import DKGSlice, KGSliceId, MetricSnapshot, SliceInputs -from app.k8s_transform.cluster_transformer import ClusterToRDFTransformer -from app.k8s_transform.node_transformer import NodesToRDFTransformer -from app.k8s_transform.pod_transformer import PodToRDFTransformer -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.workload_transformer import WorkloadToRDFTransformer from app.kg.graph import Graph from app.kg.inmemory_graph import InMemoryGraph +from app.transform.k8s.cluster_transformer import ClusterToRDFTransformer +from app.transform.k8s.node_transformer import NodesToRDFTransformer +from app.transform.k8s.pod_transformer import PodToRDFTransformer +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.workload_transformer import WorkloadToRDFTransformer from app.transform.metrics.node_metric_transformer import NodeMetricToGraphTransformer from app.transform.metrics.pod_metric_transformer import PodMetricToGraphTransformer diff --git a/app/core/test_graph_fixture.py b/app/core/test_graph_fixture.py index beedf2b..52630bf 100644 --- a/app/core/test_graph_fixture.py +++ b/app/core/test_graph_fixture.py @@ -3,12 +3,12 @@ import json from io import FileIO -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.id_base import IdBase from app.kg.inmemory_graph import InMemoryGraph from app.kg.iri import IRI from app.serialize.jsonld_configuration import JsonLDConfiguration +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class TestTransformer(UpperOntologyBase): diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index a749108..a95dafa 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -14,11 +14,11 @@ from app.core.metric_repository import MetricQuery, ResultParserId from app.core.metric_value import MetricValue from app.core.types import KGSliceId, MetricSnapshot, SliceInputs -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.id_base import IdBase from app.serialize.jsonld_configuration import JsonLDConfiguration from app.serialize.jsonld_serializer import JsonLDSerialializer +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class SnapshotTestBase: diff --git a/app/k8s_transform/__fixture__/cluster.jsonld b/app/transform/k8s/__fixture__/cluster.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/cluster.jsonld rename to app/transform/k8s/__fixture__/cluster.jsonld diff --git a/app/k8s_transform/__fixture__/cluster.kubeadm-config.json b/app/transform/k8s/__fixture__/cluster.kubeadm-config.json similarity index 100% rename from app/k8s_transform/__fixture__/cluster.kubeadm-config.json rename to app/transform/k8s/__fixture__/cluster.kubeadm-config.json diff --git a/app/k8s_transform/__fixture__/cluster.nodes.json b/app/transform/k8s/__fixture__/cluster.nodes.json similarity index 100% rename from app/k8s_transform/__fixture__/cluster.nodes.json rename to app/transform/k8s/__fixture__/cluster.nodes.json diff --git a/app/k8s_transform/__fixture__/cluster.turtle 
b/app/transform/k8s/__fixture__/cluster.turtle similarity index 100% rename from app/k8s_transform/__fixture__/cluster.turtle rename to app/transform/k8s/__fixture__/cluster.turtle diff --git a/app/k8s_transform/__fixture__/deployment.json b/app/transform/k8s/__fixture__/deployment.json similarity index 100% rename from app/k8s_transform/__fixture__/deployment.json rename to app/transform/k8s/__fixture__/deployment.json diff --git a/app/k8s_transform/__fixture__/deployment.jsonld b/app/transform/k8s/__fixture__/deployment.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/deployment.jsonld rename to app/transform/k8s/__fixture__/deployment.jsonld diff --git a/app/k8s_transform/__fixture__/deployment.turtle b/app/transform/k8s/__fixture__/deployment.turtle similarity index 100% rename from app/k8s_transform/__fixture__/deployment.turtle rename to app/transform/k8s/__fixture__/deployment.turtle diff --git a/app/k8s_transform/__fixture__/master_node.json b/app/transform/k8s/__fixture__/master_node.json similarity index 100% rename from app/k8s_transform/__fixture__/master_node.json rename to app/transform/k8s/__fixture__/master_node.json diff --git a/app/k8s_transform/__fixture__/master_node.jsonld b/app/transform/k8s/__fixture__/master_node.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/master_node.jsonld rename to app/transform/k8s/__fixture__/master_node.jsonld diff --git a/app/k8s_transform/__fixture__/master_node.turtle b/app/transform/k8s/__fixture__/master_node.turtle similarity index 100% rename from app/k8s_transform/__fixture__/master_node.turtle rename to app/transform/k8s/__fixture__/master_node.turtle diff --git a/app/k8s_transform/__fixture__/pod1.json b/app/transform/k8s/__fixture__/pod1.json similarity index 100% rename from app/k8s_transform/__fixture__/pod1.json rename to app/transform/k8s/__fixture__/pod1.json diff --git a/app/k8s_transform/__fixture__/pod1.jsonld b/app/transform/k8s/__fixture__/pod1.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/pod1.jsonld rename to app/transform/k8s/__fixture__/pod1.jsonld diff --git a/app/k8s_transform/__fixture__/pod1.turtle b/app/transform/k8s/__fixture__/pod1.turtle similarity index 100% rename from app/k8s_transform/__fixture__/pod1.turtle rename to app/transform/k8s/__fixture__/pod1.turtle diff --git a/app/k8s_transform/__fixture__/pod2.json b/app/transform/k8s/__fixture__/pod2.json similarity index 100% rename from app/k8s_transform/__fixture__/pod2.json rename to app/transform/k8s/__fixture__/pod2.json diff --git a/app/k8s_transform/__fixture__/pod2.jsonld b/app/transform/k8s/__fixture__/pod2.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/pod2.jsonld rename to app/transform/k8s/__fixture__/pod2.jsonld diff --git a/app/k8s_transform/__fixture__/pod2.turtle b/app/transform/k8s/__fixture__/pod2.turtle similarity index 100% rename from app/k8s_transform/__fixture__/pod2.turtle rename to app/transform/k8s/__fixture__/pod2.turtle diff --git a/app/k8s_transform/__fixture__/pod3.json b/app/transform/k8s/__fixture__/pod3.json similarity index 100% rename from app/k8s_transform/__fixture__/pod3.json rename to app/transform/k8s/__fixture__/pod3.json diff --git a/app/k8s_transform/__fixture__/pod3.jsonld b/app/transform/k8s/__fixture__/pod3.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/pod3.jsonld rename to app/transform/k8s/__fixture__/pod3.jsonld diff --git a/app/k8s_transform/__fixture__/pod3.turtle 
b/app/transform/k8s/__fixture__/pod3.turtle similarity index 100% rename from app/k8s_transform/__fixture__/pod3.turtle rename to app/transform/k8s/__fixture__/pod3.turtle diff --git a/app/k8s_transform/__fixture__/replicaset.json b/app/transform/k8s/__fixture__/replicaset.json similarity index 100% rename from app/k8s_transform/__fixture__/replicaset.json rename to app/transform/k8s/__fixture__/replicaset.json diff --git a/app/k8s_transform/__fixture__/replicaset.jsonld b/app/transform/k8s/__fixture__/replicaset.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/replicaset.jsonld rename to app/transform/k8s/__fixture__/replicaset.jsonld diff --git a/app/k8s_transform/__fixture__/replicaset.turtle b/app/transform/k8s/__fixture__/replicaset.turtle similarity index 100% rename from app/k8s_transform/__fixture__/replicaset.turtle rename to app/transform/k8s/__fixture__/replicaset.turtle diff --git a/app/k8s_transform/__fixture__/statefulset.json b/app/transform/k8s/__fixture__/statefulset.json similarity index 100% rename from app/k8s_transform/__fixture__/statefulset.json rename to app/transform/k8s/__fixture__/statefulset.json diff --git a/app/k8s_transform/__fixture__/statefulset.jsonld b/app/transform/k8s/__fixture__/statefulset.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/statefulset.jsonld rename to app/transform/k8s/__fixture__/statefulset.jsonld diff --git a/app/k8s_transform/__fixture__/statefulset.turtle b/app/transform/k8s/__fixture__/statefulset.turtle similarity index 100% rename from app/k8s_transform/__fixture__/statefulset.turtle rename to app/transform/k8s/__fixture__/statefulset.turtle diff --git a/app/k8s_transform/__fixture__/worker_node.json b/app/transform/k8s/__fixture__/worker_node.json similarity index 100% rename from app/k8s_transform/__fixture__/worker_node.json rename to app/transform/k8s/__fixture__/worker_node.json diff --git a/app/k8s_transform/__fixture__/worker_node.jsonld b/app/transform/k8s/__fixture__/worker_node.jsonld similarity index 100% rename from app/k8s_transform/__fixture__/worker_node.jsonld rename to app/transform/k8s/__fixture__/worker_node.jsonld diff --git a/app/k8s_transform/__fixture__/worker_node.turtle b/app/transform/k8s/__fixture__/worker_node.turtle similarity index 100% rename from app/k8s_transform/__fixture__/worker_node.turtle rename to app/transform/k8s/__fixture__/worker_node.turtle diff --git a/app/k8s_transform/__init__.py b/app/transform/k8s/__init__.py similarity index 100% rename from app/k8s_transform/__init__.py rename to app/transform/k8s/__init__.py diff --git a/app/k8s_transform/cluster_transformer.py b/app/transform/k8s/cluster_transformer.py similarity index 89% rename from app/k8s_transform/cluster_transformer.py rename to app/transform/k8s/cluster_transformer.py index 6fd270e..9fc1cc8 100644 --- a/app/k8s_transform/cluster_transformer.py +++ b/app/transform/k8s/cluster_transformer.py @@ -3,11 +3,11 @@ import yaml from jsonpath_ng.ext import parse -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class ClusterToRDFTransformer(TransformerBase, 
UpperOntologyBase): diff --git a/app/k8s_transform/node_transformer.py b/app/transform/k8s/node_transformer.py similarity index 96% rename from app/k8s_transform/node_transformer.py rename to app/transform/k8s/node_transformer.py index 0e034f9..e27b562 100644 --- a/app/k8s_transform/node_transformer.py +++ b/app/transform/k8s/node_transformer.py @@ -1,10 +1,10 @@ from typing import Any, Dict -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class NodesToRDFTransformer(TransformerBase, UpperOntologyBase): diff --git a/app/k8s_transform/pod_transformer.py b/app/transform/k8s/pod_transformer.py similarity index 95% rename from app/k8s_transform/pod_transformer.py rename to app/transform/k8s/pod_transformer.py index f8f7773..defa195 100644 --- a/app/k8s_transform/pod_transformer.py +++ b/app/transform/k8s/pod_transformer.py @@ -2,12 +2,12 @@ import re -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI from app.kg.literal import Literal +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class PodToRDFTransformer(TransformerBase, UpperOntologyBase): diff --git a/app/k8s_transform/test_base.py b/app/transform/k8s/test_base.py similarity index 82% rename from app/k8s_transform/test_base.py rename to app/transform/k8s/test_base.py index 263ccea..fef74be 100644 --- a/app/k8s_transform/test_base.py +++ b/app/transform/k8s/test_base.py @@ -4,27 +4,29 @@ from io import FileIO from unittest import TestCase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.id_base import IdBase from app.kg.iri import IRI from app.serialize.jsonld_configuration import JsonLDConfiguration +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class TransformBaseTest(TestCase): + BASE_PATH = "app/transform/k8s/__fixture__" + def load_turtle(self, name: str) -> str: - with FileIO(f"app/k8s_transform/__fixture__/{name}.turtle") as f: + with FileIO(f"{self.BASE_PATH}/{name}.turtle") as f: return f.readall().decode("utf-8") def load_jsonld(self, name: str) -> str: - with FileIO(f"app/k8s_transform/__fixture__/{name}.jsonld") as f: + with FileIO(f"{self.BASE_PATH}/{name}.jsonld") as f: return json.load(f) # type: ignore def load_json(self, name: str) -> Dict[str, Any]: - with FileIO(f"app/k8s_transform/__fixture__/{name}.json") as f: + with FileIO(f"{self.BASE_PATH}/{name}.json") as f: return json.load(f) # type: ignore def load_json_list(self, name: str) -> List[Dict[str, Any]]: - with FileIO(f"app/k8s_transform/__fixture__/{name}.json") as f: + with FileIO(f"{self.BASE_PATH}/{name}.json") as f: return json.load(f) # type: ignore def get_jsonld_config(self) -> JsonLDConfiguration: diff --git a/app/k8s_transform/test_cluster_transformer.py b/app/transform/k8s/test_cluster_transformer.py similarity 
index 89% rename from app/k8s_transform/test_cluster_transformer.py rename to app/transform/k8s/test_cluster_transformer.py index 15b6644..bf749d0 100644 --- a/app/k8s_transform/test_cluster_transformer.py +++ b/app/transform/k8s/test_cluster_transformer.py @@ -1,12 +1,12 @@ import json from io import StringIO -from app.k8s_transform.cluster_transformer import ClusterToRDFTransformer -from app.k8s_transform.test_base import TransformBaseTest -from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.k8s.cluster_transformer import ClusterToRDFTransformer +from app.transform.k8s.test_base import TransformBaseTest +from app.transform.k8s.transformation_context import TransformationContext class ClusterTransformerTest(TransformBaseTest): diff --git a/app/k8s_transform/test_node_transformer.py b/app/transform/k8s/test_node_transformer.py similarity index 89% rename from app/k8s_transform/test_node_transformer.py rename to app/transform/k8s/test_node_transformer.py index 01b5fc7..270c9cc 100644 --- a/app/k8s_transform/test_node_transformer.py +++ b/app/transform/k8s/test_node_transformer.py @@ -1,12 +1,12 @@ import json from io import StringIO -from app.k8s_transform.node_transformer import NodesToRDFTransformer -from app.k8s_transform.test_base import TransformBaseTest -from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.k8s.node_transformer import NodesToRDFTransformer +from app.transform.k8s.test_base import TransformBaseTest +from app.transform.k8s.transformation_context import TransformationContext class NodeTransformerTest(TransformBaseTest): diff --git a/app/k8s_transform/test_pod_transformer.py b/app/transform/k8s/test_pod_transformer.py similarity index 89% rename from app/k8s_transform/test_pod_transformer.py rename to app/transform/k8s/test_pod_transformer.py index a764d90..5138814 100644 --- a/app/k8s_transform/test_pod_transformer.py +++ b/app/transform/k8s/test_pod_transformer.py @@ -1,12 +1,12 @@ import json from io import StringIO -from app.k8s_transform.pod_transformer import PodToRDFTransformer -from app.k8s_transform.test_base import TransformBaseTest -from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.k8s.pod_transformer import PodToRDFTransformer +from app.transform.k8s.test_base import TransformBaseTest +from app.transform.k8s.transformation_context import TransformationContext class PodTransformerTest(TransformBaseTest): diff --git a/app/k8s_transform/test_workload_transformer.py b/app/transform/k8s/test_workload_transformer.py similarity index 89% rename from app/k8s_transform/test_workload_transformer.py rename to app/transform/k8s/test_workload_transformer.py index 07bfe82..bdf8f67 100644 --- a/app/k8s_transform/test_workload_transformer.py +++ b/app/transform/k8s/test_workload_transformer.py @@ -1,12 +1,12 @@ import json from io import StringIO -from app.k8s_transform.test_base import TransformBaseTest -from 
app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.workload_transformer import WorkloadToRDFTransformer from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.k8s.test_base import TransformBaseTest +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.workload_transformer import WorkloadToRDFTransformer class WorkloadTransformerTest(TransformBaseTest): diff --git a/app/k8s_transform/transformation_context.py b/app/transform/k8s/transformation_context.py similarity index 100% rename from app/k8s_transform/transformation_context.py rename to app/transform/k8s/transformation_context.py diff --git a/app/k8s_transform/transformer_base.py b/app/transform/k8s/transformer_base.py similarity index 98% rename from app/k8s_transform/transformer_base.py rename to app/transform/k8s/transformer_base.py index c4291da..d49f723 100644 --- a/app/k8s_transform/transformer_base.py +++ b/app/transform/k8s/transformer_base.py @@ -5,11 +5,11 @@ from jsonpath_ng.ext import parse from kubernetes.utils.quantity import parse_quantity -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI from app.kg.literal import Literal +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class TransformerBase: diff --git a/app/k8s_transform/upper_ontology_base.py b/app/transform/k8s/upper_ontology_base.py similarity index 100% rename from app/k8s_transform/upper_ontology_base.py rename to app/transform/k8s/upper_ontology_base.py diff --git a/app/k8s_transform/workload_transformer.py b/app/transform/k8s/workload_transformer.py similarity index 98% rename from app/k8s_transform/workload_transformer.py rename to app/transform/k8s/workload_transformer.py index 6aaf8e4..f5faed8 100644 --- a/app/k8s_transform/workload_transformer.py +++ b/app/transform/k8s/workload_transformer.py @@ -2,11 +2,11 @@ from kubernetes.utils.quantity import parse_quantity -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class WorkloadToRDFTransformer(TransformerBase, UpperOntologyBase): diff --git a/app/transform/metrics/metric_transformer.py b/app/transform/metrics/metric_transformer.py index fa44168..da3adbd 100644 --- a/app/transform/metrics/metric_transformer.py +++ b/app/transform/metrics/metric_transformer.py @@ -2,8 +2,8 @@ from app.core.metric_repository import MetricQuery from app.core.metric_value import MetricValue -from app.k8s_transform.transformation_context import TransformationContext from app.kg.graph import Graph +from app.transform.k8s.transformation_context import TransformationContext class MetricToGraphTransformerBase: diff --git a/app/transform/metrics/node_metric_transformer.py b/app/transform/metrics/node_metric_transformer.py index f8db030..8e8b9a9 
100644 --- a/app/transform/metrics/node_metric_transformer.py +++ b/app/transform/metrics/node_metric_transformer.py @@ -2,11 +2,11 @@ from app.core.metric_repository import MetricQuery from app.core.metric_value import MetricValue -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.upper_ontology_base import UpperOntologyBase from app.transform.metrics.metric_transformer import MetricToGraphTransformerBase diff --git a/app/transform/metrics/pod_metric_transformer.py b/app/transform/metrics/pod_metric_transformer.py index 8352e88..a2ba254 100644 --- a/app/transform/metrics/pod_metric_transformer.py +++ b/app/transform/metrics/pod_metric_transformer.py @@ -2,11 +2,11 @@ from app.core.metric_repository import MetricQuery from app.core.metric_value import MetricValue -from app.k8s_transform.transformation_context import TransformationContext -from app.k8s_transform.transformer_base import TransformerBase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.graph import Graph from app.kg.iri import IRI +from app.transform.k8s.transformation_context import TransformationContext +from app.transform.k8s.transformer_base import TransformerBase +from app.transform.k8s.upper_ontology_base import UpperOntologyBase from app.transform.metrics.metric_transformer import MetricToGraphTransformerBase diff --git a/app/transform/metrics/test_base.py b/app/transform/metrics/test_base.py index 14a79ec..6bbe9dc 100644 --- a/app/transform/metrics/test_base.py +++ b/app/transform/metrics/test_base.py @@ -4,10 +4,10 @@ from io import FileIO from unittest import TestCase -from app.k8s_transform.upper_ontology_base import UpperOntologyBase from app.kg.id_base import IdBase from app.kg.iri import IRI from app.serialize.jsonld_configuration import JsonLDConfiguration +from app.transform.k8s.upper_ontology_base import UpperOntologyBase class MetricTransformTestBase(TestCase): diff --git a/app/transform/metrics/test_node_metric_transformer.py b/app/transform/metrics/test_node_metric_transformer.py index 5b7be76..61e62c5 100644 --- a/app/transform/metrics/test_node_metric_transformer.py +++ b/app/transform/metrics/test_node_metric_transformer.py @@ -3,10 +3,10 @@ from app.core.metric_repository import MetricQuery, ResultParserId from app.core.metric_value import MetricValue -from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.k8s.transformation_context import TransformationContext from app.transform.metrics.node_metric_transformer import NodeMetricToGraphTransformer from app.transform.metrics.test_base import MetricTransformTestBase diff --git a/app/transform/metrics/test_pod_metric_transformer.py b/app/transform/metrics/test_pod_metric_transformer.py index b18b743..ae4a0a1 100644 --- a/app/transform/metrics/test_pod_metric_transformer.py +++ b/app/transform/metrics/test_pod_metric_transformer.py @@ -3,10 +3,10 @@ from app.core.metric_repository import MetricQuery, ResultParserId from app.core.metric_value 
import MetricValue -from app.k8s_transform.transformation_context import TransformationContext from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer +from app.transform.k8s.transformation_context import TransformationContext from app.transform.metrics.pod_metric_transformer import PodMetricToGraphTransformer from app.transform.metrics.test_base import MetricTransformTestBase From d562f06113400dba52799ca49861c2c17eae72a3 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 12:06:40 +0200 Subject: [PATCH 55/61] HHT-669: cleanup --- app/core/kg_builder.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/core/kg_builder.py b/app/core/kg_builder.py index 883cbe7..669ba2c 100644 --- a/app/core/kg_builder.py +++ b/app/core/kg_builder.py @@ -21,7 +21,6 @@ class QuerySettings(BaseSettings): pod_queries: List[MetricQuery] = [] node_queries: List[MetricQuery] = [] - workload_queries: List[MetricQuery] = [] class KGBuilderSettings(BaseSettings): From c3a336b92471635c794f69e1c32acc6f623b917c Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 12:12:56 +0200 Subject: [PATCH 56/61] HHT-669: extracting kg builder --- app/core/builder/__init__.py | 0 app/core/{ => builder}/kg_builder.py | 8 ++++---- app/core/{ => builder}/kg_slice_assembler.py | 0 app/core/{ => builder}/resource_snapshot_index.py | 0 app/core/builder/slice_strategy/__init__.py | 0 .../{ => builder/slice_strategy}/single_slice_strategy.py | 2 +- .../slice_strategy}/slice_for_node_strategy.py | 4 ++-- app/core/{ => builder/slice_strategy}/slice_strategy.py | 0 .../slice_strategy}/test_single_slice_strategy.py | 2 +- .../slice_strategy}/test_slice_for_node_strategy.py | 2 +- app/core/{ => builder}/test_kg_builder.py | 2 +- app/core/{ => builder}/test_kgslice_assembler.py | 2 +- app/core/test_snapshot_base.py | 2 +- app/kgexporter_context.py | 2 +- app/kgexporter_settings.py | 2 +- app/test_kgexporter_context.py | 2 +- app/test_pydantic_yaml.py | 2 +- 17 files changed, 16 insertions(+), 16 deletions(-) create mode 100644 app/core/builder/__init__.py rename app/core/{ => builder}/kg_builder.py (94%) rename app/core/{ => builder}/kg_slice_assembler.py (100%) rename app/core/{ => builder}/resource_snapshot_index.py (100%) create mode 100644 app/core/builder/slice_strategy/__init__.py rename app/core/{ => builder/slice_strategy}/single_slice_strategy.py (92%) rename app/core/{ => builder/slice_strategy}/slice_for_node_strategy.py (96%) rename app/core/{ => builder/slice_strategy}/slice_strategy.py (100%) rename app/core/{ => builder/slice_strategy}/test_single_slice_strategy.py (97%) rename app/core/{ => builder/slice_strategy}/test_slice_for_node_strategy.py (97%) rename app/core/{ => builder}/test_kg_builder.py (97%) rename app/core/{ => builder}/test_kgslice_assembler.py (95%) diff --git a/app/core/builder/__init__.py b/app/core/builder/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/core/kg_builder.py b/app/core/builder/kg_builder.py similarity index 94% rename from app/core/kg_builder.py rename to app/core/builder/kg_builder.py index 669ba2c..16c9988 100644 --- a/app/core/kg_builder.py +++ b/app/core/builder/kg_builder.py @@ -8,12 +8,12 @@ from app.clients.k8s.k8s_client import K8SClient from app.core.async_queue import AsyncQueue +from app.core.builder.kg_slice_assembler import KGSliceAssembler +from app.core.builder.slice_strategy.single_slice_strategy import 
SingleSliceStrategy +from app.core.builder.slice_strategy.slice_for_node_strategy import SliceForNodeStrategy +from app.core.builder.slice_strategy.slice_strategy import SliceStrategy from app.core.kg_repository import KGRepository -from app.core.kg_slice_assembler import KGSliceAssembler from app.core.metric_repository import MetricQuery, MetricRepository -from app.core.single_slice_strategy import SingleSliceStrategy -from app.core.slice_for_node_strategy import SliceForNodeStrategy -from app.core.slice_strategy import SliceStrategy from app.core.types import DKGSlice, MetricSnapshot from app.util.clock import Clock diff --git a/app/core/kg_slice_assembler.py b/app/core/builder/kg_slice_assembler.py similarity index 100% rename from app/core/kg_slice_assembler.py rename to app/core/builder/kg_slice_assembler.py diff --git a/app/core/resource_snapshot_index.py b/app/core/builder/resource_snapshot_index.py similarity index 100% rename from app/core/resource_snapshot_index.py rename to app/core/builder/resource_snapshot_index.py diff --git a/app/core/builder/slice_strategy/__init__.py b/app/core/builder/slice_strategy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/core/single_slice_strategy.py b/app/core/builder/slice_strategy/single_slice_strategy.py similarity index 92% rename from app/core/single_slice_strategy.py rename to app/core/builder/slice_strategy/single_slice_strategy.py index 207892f..6fe74ff 100644 --- a/app/core/single_slice_strategy.py +++ b/app/core/builder/slice_strategy/single_slice_strategy.py @@ -3,7 +3,7 @@ from urllib.parse import urlparse from app.clients.k8s.k8s_client import ResourceSnapshot -from app.core.slice_strategy import SliceStrategy +from app.core.builder.slice_strategy.slice_strategy import SliceStrategy from app.core.types import KGSliceId, MetricSnapshot, SliceInputs diff --git a/app/core/slice_for_node_strategy.py b/app/core/builder/slice_strategy/slice_for_node_strategy.py similarity index 96% rename from app/core/slice_for_node_strategy.py rename to app/core/builder/slice_strategy/slice_for_node_strategy.py index d202e28..3b98177 100644 --- a/app/core/slice_for_node_strategy.py +++ b/app/core/builder/slice_strategy/slice_for_node_strategy.py @@ -3,8 +3,8 @@ from jsonpath_ng.ext import parse from app.clients.k8s.k8s_client import ResourceSnapshot -from app.core.resource_snapshot_index import ResourceSnapshotIndex -from app.core.slice_strategy import SliceStrategy +from app.core.builder.resource_snapshot_index import ResourceSnapshotIndex +from app.core.builder.slice_strategy.slice_strategy import SliceStrategy from app.core.types import KGSliceId, MetricSnapshot, SliceInputs ReferenceKind: TypeAlias = str diff --git a/app/core/slice_strategy.py b/app/core/builder/slice_strategy/slice_strategy.py similarity index 100% rename from app/core/slice_strategy.py rename to app/core/builder/slice_strategy/slice_strategy.py diff --git a/app/core/test_single_slice_strategy.py b/app/core/builder/slice_strategy/test_single_slice_strategy.py similarity index 97% rename from app/core/test_single_slice_strategy.py rename to app/core/builder/slice_strategy/test_single_slice_strategy.py index 5998287..1d67bd1 100644 --- a/app/core/test_single_slice_strategy.py +++ b/app/core/builder/slice_strategy/test_single_slice_strategy.py @@ -1,6 +1,6 @@ from unittest import TestCase -from app.core.single_slice_strategy import SingleSliceStrategy +from app.core.builder.slice_strategy.single_slice_strategy import SingleSliceStrategy from 
app.core.test_snapshot_base import SnapshotTestBase from app.core.types import KGSliceId diff --git a/app/core/test_slice_for_node_strategy.py b/app/core/builder/slice_strategy/test_slice_for_node_strategy.py similarity index 97% rename from app/core/test_slice_for_node_strategy.py rename to app/core/builder/slice_strategy/test_slice_for_node_strategy.py index ce0adce..678219b 100644 --- a/app/core/test_slice_for_node_strategy.py +++ b/app/core/builder/slice_strategy/test_slice_for_node_strategy.py @@ -1,6 +1,6 @@ from unittest import TestCase -from app.core.slice_for_node_strategy import SliceForNodeStrategy +from app.core.builder.slice_strategy.slice_for_node_strategy import SliceForNodeStrategy from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import KGSliceId diff --git a/app/core/test_kg_builder.py b/app/core/builder/test_kg_builder.py similarity index 97% rename from app/core/test_kg_builder.py rename to app/core/builder/test_kg_builder.py index f033da0..25cba8d 100644 --- a/app/core/test_kg_builder.py +++ b/app/core/builder/test_kg_builder.py @@ -8,7 +8,7 @@ MockMetadataServiceClient, ) from app.core.async_queue import AsyncQueue -from app.core.kg_builder import KGBuilder, KGBuilderSettings, QuerySettings +from app.core.builder.kg_builder import KGBuilder, KGBuilderSettings, QuerySettings from app.core.kg_repository import KGRepository from app.core.metric_repository import MetricRepository from app.core.test_graph_fixture import TestGraphFixture diff --git a/app/core/test_kgslice_assembler.py b/app/core/builder/test_kgslice_assembler.py similarity index 95% rename from app/core/test_kgslice_assembler.py rename to app/core/builder/test_kgslice_assembler.py index 5fd8f98..e9b58f7 100644 --- a/app/core/test_kgslice_assembler.py +++ b/app/core/builder/test_kgslice_assembler.py @@ -2,7 +2,7 @@ from unittest import TestCase -from app.core.kg_slice_assembler import KGSliceAssembler +from app.core.builder.kg_slice_assembler import KGSliceAssembler from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import KGSliceId from app.kg.iri import IRI diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index a95dafa..f4cdf6c 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -10,7 +10,7 @@ from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient from app.clients.k8s.k8s_client import ResourceSnapshot from app.clients.k8s.mock_k8s_client import MockK8SClient -from app.core.kg_builder import QuerySettings +from app.core.builder.kg_builder import QuerySettings from app.core.metric_repository import MetricQuery, ResultParserId from app.core.metric_value import MetricValue from app.core.types import KGSliceId, MetricSnapshot, SliceInputs diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 8c088af..14c4441 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -10,7 +10,7 @@ from app.clients.k8s.k8s_client import K8SClient from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.async_queue import AsyncQueue -from app.core.kg_builder import KGBuilder +from app.core.builder.kg_builder import KGBuilder from app.core.kg_repository import KGRepository from app.core.kg_updater import KGUpdater from app.core.metric_repository import MetricRepository diff --git a/app/kgexporter_settings.py b/app/kgexporter_settings.py index 198f5eb..e97f4c5 100644 --- a/app/kgexporter_settings.py +++ 
b/app/kgexporter_settings.py @@ -6,7 +6,7 @@ MetadataServiceSettings, ) from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings -from app.core.kg_builder import KGBuilderSettings +from app.core.builder.kg_builder import KGBuilderSettings class PrometheusSettings(BaseSettings): diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 2270787..7d6aa6c 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -14,7 +14,7 @@ SerializedGraph, ) from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings -from app.core.kg_builder import KGBuilderSettings, QuerySettings +from app.core.builder.kg_builder import KGBuilderSettings, QuerySettings from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import KGSliceId from app.kgexporter_context import KGExporterContext diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index ae65ecd..72d9608 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -7,7 +7,7 @@ MetadataServiceSettings, ) from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings -from app.core.kg_builder import KGBuilderSettings, QuerySettings +from app.core.builder.kg_builder import KGBuilderSettings, QuerySettings from app.kgexporter_settings import KGExporterSettings, PrometheusSettings from app.pydantic_yaml import from_yaml, to_yaml From cc7ba6385033a3fdea122e72d9dd7d1fb118799f Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 13:43:00 +0200 Subject: [PATCH 57/61] HHT-669: extracting kg updater and repository --- app/clients/influxdb/influxdb_client_impl.py | 6 +-- app/clients/influxdb/mock_infuxdbclient.py | 6 +-- app/clients/influxdb/simple_result_parser.py | 4 +- .../influxdb/test_simple_result_parser.py | 2 +- app/clients/k8s/k8s_client.py | 1 + .../prometheus/mock_prometheus_client.py | 6 +-- app/clients/prometheus/prometheus_client.py | 6 +-- .../prometheus/prometheus_result_parser.py | 4 +- .../test_prometheus_result_parser.py | 2 +- app/core/builder/kg_builder.py | 5 +- app/core/builder/test_kg_builder.py | 4 +- app/core/metric_repository.py | 49 ------------------- app/core/metric_value.py | 14 ------ app/core/repository/__init__.py | 0 app/core/repository/metric_repository.py | 25 ++++++++++ .../repository}/metricstore_client.py | 4 +- .../repository}/query_result_parser.py | 2 +- .../test_metric_repository.py | 5 +- app/core/repository/types.py | 34 +++++++++++++ app/core/test_snapshot_base.py | 5 +- app/core/types.py | 16 ++++-- app/core/updater/__init__.py | 0 app/core/{ => updater}/kg_repository.py | 2 +- app/core/{ => updater}/kg_result_parser.py | 0 app/core/{ => updater}/kg_tuple_parser.py | 2 +- app/core/{ => updater}/kg_updater.py | 2 +- app/core/{ => updater}/test_kg_repository.py | 4 +- .../{ => updater}/test_kg_tuple_parser.py | 2 +- app/core/{ => updater}/test_kg_updater.py | 4 +- app/kgexporter_context.py | 8 +-- app/transform/metrics/metric_transformer.py | 4 +- .../metrics/node_metric_transformer.py | 4 +- .../metrics/pod_metric_transformer.py | 4 +- .../metrics/test_node_metric_transformer.py | 4 +- .../metrics/test_pod_metric_transformer.py | 4 +- 35 files changed, 126 insertions(+), 118 deletions(-) delete mode 100644 app/core/metric_repository.py delete mode 100644 app/core/metric_value.py create mode 100644 app/core/repository/__init__.py create mode 100644 app/core/repository/metric_repository.py rename app/{clients/influxdb => 
core/repository}/metricstore_client.py (62%) rename app/{clients/influxdb => core/repository}/query_result_parser.py (94%) rename app/core/{ => repository}/test_metric_repository.py (93%) create mode 100644 app/core/repository/types.py create mode 100644 app/core/updater/__init__.py rename app/core/{ => updater}/kg_repository.py (96%) rename app/core/{ => updater}/kg_result_parser.py (100%) rename app/core/{ => updater}/kg_tuple_parser.py (83%) rename app/core/{ => updater}/kg_updater.py (97%) rename app/core/{ => updater}/test_kg_repository.py (91%) rename app/core/{ => updater}/test_kg_tuple_parser.py (82%) rename app/core/{ => updater}/test_kg_updater.py (94%) diff --git a/app/clients/influxdb/influxdb_client_impl.py b/app/clients/influxdb/influxdb_client_impl.py index b5a7525..0cfc8fb 100644 --- a/app/clients/influxdb/influxdb_client_impl.py +++ b/app/clients/influxdb/influxdb_client_impl.py @@ -4,9 +4,9 @@ from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync from app.clients.influxdb.influxdb_settings import InfluxDBSettings -from app.clients.influxdb.metricstore_client import MetricStoreClient -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.core.metric_value import MetricValue +from app.core.repository.metricstore_client import MetricStoreClient +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class InfluxDBClientImpl(MetricStoreClient): diff --git a/app/clients/influxdb/mock_infuxdbclient.py b/app/clients/influxdb/mock_infuxdbclient.py index fa26400..b24cb45 100644 --- a/app/clients/influxdb/mock_infuxdbclient.py +++ b/app/clients/influxdb/mock_infuxdbclient.py @@ -1,8 +1,8 @@ from typing import Dict, List -from app.clients.influxdb.metricstore_client import MetricStoreClient -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.core.metric_value import MetricValue +from app.core.repository.metricstore_client import MetricStoreClient +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class MockInfluxDBClient(MetricStoreClient): diff --git a/app/clients/influxdb/simple_result_parser.py b/app/clients/influxdb/simple_result_parser.py index 374d466..9b32498 100644 --- a/app/clients/influxdb/simple_result_parser.py +++ b/app/clients/influxdb/simple_result_parser.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.core.metric_value import MetricValue +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class SimpleResultParser(QueryResultParser): diff --git a/app/clients/influxdb/test_simple_result_parser.py b/app/clients/influxdb/test_simple_result_parser.py index a48a7fc..2a03d02 100644 --- a/app/clients/influxdb/test_simple_result_parser.py +++ b/app/clients/influxdb/test_simple_result_parser.py @@ -4,7 +4,7 @@ from dateutil.tz import tzutc from app.clients.influxdb.simple_result_parser import SimpleResultParser -from app.core.metric_value import MetricValue +from app.core.types import MetricValue class SimpleResultParserTest(TestCase): diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py index b1c84c3..e636875 100644 --- a/app/clients/k8s/k8s_client.py +++ b/app/clients/k8s/k8s_client.py @@ -54,6 +54,7 @@ def find_resources_by_kind_and_identity( else: return [] + # TODO remove def get_resource_name(self, node: Dict[str, 
Any]) -> str: for match in parse("$.metadata.name").find(node): return str(match.value) diff --git a/app/clients/prometheus/mock_prometheus_client.py b/app/clients/prometheus/mock_prometheus_client.py index d122a00..9eb171f 100644 --- a/app/clients/prometheus/mock_prometheus_client.py +++ b/app/clients/prometheus/mock_prometheus_client.py @@ -1,8 +1,8 @@ from typing import Dict, List -from app.clients.influxdb.metricstore_client import MetricStoreClient -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.core.metric_value import MetricValue +from app.core.repository.metricstore_client import MetricStoreClient +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class MockPrometheusClient(MetricStoreClient): diff --git a/app/clients/prometheus/prometheus_client.py b/app/clients/prometheus/prometheus_client.py index b6ae680..b93df04 100644 --- a/app/clients/prometheus/prometheus_client.py +++ b/app/clients/prometheus/prometheus_client.py @@ -2,10 +2,10 @@ from aioprometheus_api_client import PrometheusConnect -from app.clients.influxdb.metricstore_client import MetricStoreClient -from app.clients.influxdb.query_result_parser import QueryResultParser from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings -from app.core.metric_value import MetricValue +from app.core.repository.metricstore_client import MetricStoreClient +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class PrometheusClient(MetricStoreClient): diff --git a/app/clients/prometheus/prometheus_result_parser.py b/app/clients/prometheus/prometheus_result_parser.py index 2fcf75a..cc1df81 100644 --- a/app/clients/prometheus/prometheus_result_parser.py +++ b/app/clients/prometheus/prometheus_result_parser.py @@ -1,7 +1,7 @@ from typing import Any, Dict, List -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.core.metric_value import MetricValue +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class PrometheusResultParser(QueryResultParser): diff --git a/app/clients/prometheus/test_prometheus_result_parser.py b/app/clients/prometheus/test_prometheus_result_parser.py index 8c2b327..052a8ac 100644 --- a/app/clients/prometheus/test_prometheus_result_parser.py +++ b/app/clients/prometheus/test_prometheus_result_parser.py @@ -3,7 +3,7 @@ from unittest import TestCase from app.clients.prometheus.prometheus_result_parser import PrometheusResultParser -from app.core.metric_value import MetricValue +from app.core.types import MetricValue class PrometheusResultParserTest(TestCase): diff --git a/app/core/builder/kg_builder.py b/app/core/builder/kg_builder.py index 16c9988..5076853 100644 --- a/app/core/builder/kg_builder.py +++ b/app/core/builder/kg_builder.py @@ -12,9 +12,10 @@ from app.core.builder.slice_strategy.single_slice_strategy import SingleSliceStrategy from app.core.builder.slice_strategy.slice_for_node_strategy import SliceForNodeStrategy from app.core.builder.slice_strategy.slice_strategy import SliceStrategy -from app.core.kg_repository import KGRepository -from app.core.metric_repository import MetricQuery, MetricRepository +from app.core.repository.metric_repository import MetricRepository +from app.core.repository.types import MetricQuery from app.core.types import DKGSlice, MetricSnapshot +from app.core.updater.kg_repository import KGRepository from 
app.util.clock import Clock diff --git a/app/core/builder/test_kg_builder.py b/app/core/builder/test_kg_builder.py index 25cba8d..afd6741 100644 --- a/app/core/builder/test_kg_builder.py +++ b/app/core/builder/test_kg_builder.py @@ -9,11 +9,11 @@ ) from app.core.async_queue import AsyncQueue from app.core.builder.kg_builder import KGBuilder, KGBuilderSettings, QuerySettings -from app.core.kg_repository import KGRepository -from app.core.metric_repository import MetricRepository +from app.core.repository.metric_repository import MetricRepository from app.core.test_graph_fixture import TestGraphFixture from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import DKGSlice, KGSliceId +from app.core.updater.kg_repository import KGRepository from app.kg.inmemory_graph import InMemoryGraph from app.util.clock import Clock from app.util.mock_clock import MockClock diff --git a/app/core/metric_repository.py b/app/core/metric_repository.py deleted file mode 100644 index fc81ba8..0000000 --- a/app/core/metric_repository.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import List, Optional - -import asyncio -from dataclasses import dataclass -from enum import StrEnum - -from app.clients.influxdb.metricstore_client import MetricStoreClient -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.clients.influxdb.simple_result_parser import SimpleResultParser -from app.core.metric_value import MetricValue - - -class ResultParserId(StrEnum): - SIMPLE_RESULT_PARSER = "SimpleResultParser" - - def get_by_name(self) -> QueryResultParser: - if self.name == "SIMPLE_RESULT_PARSER": - return SimpleResultParser() - else: - raise Exception(f"Unknown parser for {self}") - - -@dataclass -class MetricQuery: - measurement_id: str - subresource: Optional[str] - source: str - unit: str - property: str - query: str - result_parser: ResultParserId - - -class MetricRepository: - client: MetricStoreClient - - def __init__(self, client: MetricStoreClient): - self.client = client - - async def query_many( - self, now: int, queries: List[MetricQuery] - ) -> List[MetricValue]: - query_futures = [self.query_one(now, query) for query in queries] - query_results: List[List[MetricValue]] = await asyncio.gather(*query_futures) - return [element for elements in query_results for element in elements] - - async def query_one(self, now: int, query: MetricQuery) -> List[MetricValue]: - result_parser = query.result_parser.get_by_name() - return await self.client.query(query.query, result_parser) diff --git a/app/core/metric_value.py b/app/core/metric_value.py deleted file mode 100644 index 0a98088..0000000 --- a/app/core/metric_value.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import TypeAlias - -from dataclasses import dataclass - -MetricId: TypeAlias = str -ResourceId: TypeAlias = str - - -@dataclass -class MetricValue: - metric_id: MetricId - resource_id: ResourceId - timestamp: int - value: float diff --git a/app/core/repository/__init__.py b/app/core/repository/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/core/repository/metric_repository.py b/app/core/repository/metric_repository.py new file mode 100644 index 0000000..5c1a97a --- /dev/null +++ b/app/core/repository/metric_repository.py @@ -0,0 +1,25 @@ +from typing import List + +import asyncio + +from app.core.repository.metricstore_client import MetricStoreClient +from app.core.repository.types import MetricQuery +from app.core.types import MetricValue + + +class MetricRepository: + client: 
MetricStoreClient + + def __init__(self, client: MetricStoreClient): + self.client = client + + async def query_many( + self, now: int, queries: List[MetricQuery] + ) -> List[MetricValue]: + query_futures = [self.query_one(now, query) for query in queries] + query_results: List[List[MetricValue]] = await asyncio.gather(*query_futures) + return [element for elements in query_results for element in elements] + + async def query_one(self, now: int, query: MetricQuery) -> List[MetricValue]: + result_parser = query.result_parser.get_by_name() + return await self.client.query(query.query, result_parser) diff --git a/app/clients/influxdb/metricstore_client.py b/app/core/repository/metricstore_client.py similarity index 62% rename from app/clients/influxdb/metricstore_client.py rename to app/core/repository/metricstore_client.py index d72f95d..2af5749 100644 --- a/app/clients/influxdb/metricstore_client.py +++ b/app/core/repository/metricstore_client.py @@ -1,7 +1,7 @@ from typing import List -from app.clients.influxdb.query_result_parser import QueryResultParser -from app.core.metric_value import MetricValue +from app.core.repository.query_result_parser import QueryResultParser +from app.core.types import MetricValue class MetricStoreClient: diff --git a/app/clients/influxdb/query_result_parser.py b/app/core/repository/query_result_parser.py similarity index 94% rename from app/clients/influxdb/query_result_parser.py rename to app/core/repository/query_result_parser.py index d5876fa..2abed65 100644 --- a/app/clients/influxdb/query_result_parser.py +++ b/app/core/repository/query_result_parser.py @@ -2,7 +2,7 @@ from datetime import datetime -from app.core.metric_value import MetricValue +from app.core.types import MetricValue class QueryResultParser: diff --git a/app/core/test_metric_repository.py b/app/core/repository/test_metric_repository.py similarity index 93% rename from app/core/test_metric_repository.py rename to app/core/repository/test_metric_repository.py index f5a4630..789a272 100644 --- a/app/core/test_metric_repository.py +++ b/app/core/repository/test_metric_repository.py @@ -2,8 +2,9 @@ from unittest import TestCase from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient -from app.core.metric_repository import MetricQuery, MetricRepository, ResultParserId -from app.core.metric_value import MetricValue +from app.core.repository.metric_repository import MetricRepository +from app.core.repository.types import MetricQuery, ResultParserId +from app.core.types import MetricValue class MetricRepositoryTest(TestCase): diff --git a/app/core/repository/types.py b/app/core/repository/types.py new file mode 100644 index 0000000..91fe045 --- /dev/null +++ b/app/core/repository/types.py @@ -0,0 +1,34 @@ +from typing import Any, Optional + +import importlib +from dataclasses import dataclass +from enum import StrEnum + + +class ResultParserId(StrEnum): + SIMPLE_RESULT_PARSER = ( + "app.clients.influxdb.simple_result_parser.SimpleResultParser" + ) + + def get_by_name(self) -> Any: + if self.name == "SIMPLE_RESULT_PARSER": + return self.instantiate(self.value) + else: + raise Exception(f"Unknown parser for {self}") + + def instantiate(self, clazz: str) -> Any: + idx = clazz.rfind(".") + module, class_name = clazz[:idx], clazz[idx + 1 :] + ClassObj = getattr(importlib.import_module(module), class_name) + return ClassObj() + + +@dataclass +class MetricQuery: + measurement_id: str + subresource: Optional[str] + source: str + unit: str + property: str + query: str + result_parser: 
ResultParserId diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index f4cdf6c..146781e 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -11,9 +11,8 @@ from app.clients.k8s.k8s_client import ResourceSnapshot from app.clients.k8s.mock_k8s_client import MockK8SClient from app.core.builder.kg_builder import QuerySettings -from app.core.metric_repository import MetricQuery, ResultParserId -from app.core.metric_value import MetricValue -from app.core.types import KGSliceId, MetricSnapshot, SliceInputs +from app.core.repository.types import MetricQuery, ResultParserId +from app.core.types import KGSliceId, MetricSnapshot, MetricValue, SliceInputs from app.kg.graph import Graph from app.kg.id_base import IdBase from app.serialize.jsonld_configuration import JsonLDConfiguration diff --git a/app/core/types.py b/app/core/types.py index 7873f56..4bd2f34 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -1,13 +1,23 @@ -from typing import Any, Dict, List, Set, Tuple +from typing import Any, Dict, List, Set, Tuple, TypeAlias import urllib.parse from dataclasses import dataclass, field from app.clients.k8s.k8s_client import ResourceSnapshot -from app.core.metric_repository import MetricQuery -from app.core.metric_value import MetricValue +from app.core.repository.types import MetricQuery from app.kg.graph import Graph +MetricId: TypeAlias = str +ResourceId: TypeAlias = str + + +@dataclass +class MetricValue: + metric_id: MetricId + resource_id: ResourceId + timestamp: int + value: float + @dataclass(frozen=True) class KGSliceId: diff --git a/app/core/updater/__init__.py b/app/core/updater/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/core/kg_repository.py b/app/core/updater/kg_repository.py similarity index 96% rename from app/core/kg_repository.py rename to app/core/updater/kg_repository.py index 86d2000..5fd2d54 100644 --- a/app/core/kg_repository.py +++ b/app/core/updater/kg_repository.py @@ -3,8 +3,8 @@ from io import StringIO from app.clients.metadata_service.metadata_service_client import MetadataServiceClient -from app.core.kg_result_parser import KGResultParser from app.core.types import KGSliceId +from app.core.updater.kg_result_parser import KGResultParser from app.kg.graph import Graph from app.serialize.jsonld_configuration import JsonLDConfiguration from app.serialize.jsonld_serializer import JsonLDSerialializer diff --git a/app/core/kg_result_parser.py b/app/core/updater/kg_result_parser.py similarity index 100% rename from app/core/kg_result_parser.py rename to app/core/updater/kg_result_parser.py diff --git a/app/core/kg_tuple_parser.py b/app/core/updater/kg_tuple_parser.py similarity index 83% rename from app/core/kg_tuple_parser.py rename to app/core/updater/kg_tuple_parser.py index e3b1e6d..3c2c820 100644 --- a/app/core/kg_tuple_parser.py +++ b/app/core/updater/kg_tuple_parser.py @@ -1,7 +1,7 @@ from typing import List from app.clients.metadata_service.metadata_service_client import Triple -from app.core.kg_result_parser import KGResultParser +from app.core.updater.kg_result_parser import KGResultParser from app.kg.graph import Graph from app.kg.inmemory_graph import InMemoryGraph diff --git a/app/core/kg_updater.py b/app/core/updater/kg_updater.py similarity index 97% rename from app/core/kg_updater.py rename to app/core/updater/kg_updater.py index 634aaf3..c61c63b 100644 --- a/app/core/kg_updater.py +++ b/app/core/updater/kg_updater.py @@ -5,8 +5,8 @@ from app.core.async_queue import 
AsyncQueue from app.core.dkg_slice_store import DKGSliceStore -from app.core.kg_repository import KGRepository from app.core.types import DKGSlice +from app.core.updater.kg_repository import KGRepository class KGUpdater: diff --git a/app/core/test_kg_repository.py b/app/core/updater/test_kg_repository.py similarity index 91% rename from app/core/test_kg_repository.py rename to app/core/updater/test_kg_repository.py index 3a682bf..67c6e23 100644 --- a/app/core/test_kg_repository.py +++ b/app/core/updater/test_kg_repository.py @@ -4,10 +4,10 @@ from app.clients.metadata_service.mock_metadata_service_client import ( MockMetadataServiceClient, ) -from app.core.kg_repository import KGRepository -from app.core.kg_tuple_parser import KGTupleParser from app.core.test_graph_fixture import TestGraphFixture from app.core.types import KGSliceId +from app.core.updater.kg_repository import KGRepository +from app.core.updater.kg_tuple_parser import KGTupleParser from app.kg.inmemory_graph import InMemoryGraph diff --git a/app/core/test_kg_tuple_parser.py b/app/core/updater/test_kg_tuple_parser.py similarity index 82% rename from app/core/test_kg_tuple_parser.py rename to app/core/updater/test_kg_tuple_parser.py index c2423f3..5dc3ba9 100644 --- a/app/core/test_kg_tuple_parser.py +++ b/app/core/updater/test_kg_tuple_parser.py @@ -1,6 +1,6 @@ from unittest import TestCase -from app.core.kg_tuple_parser import KGTupleParser +from app.core.updater.kg_tuple_parser import KGTupleParser from app.kg.inmemory_graph import InMemoryGraph diff --git a/app/core/test_kg_updater.py b/app/core/updater/test_kg_updater.py similarity index 94% rename from app/core/test_kg_updater.py rename to app/core/updater/test_kg_updater.py index 1b86ee4..5a5e3b7 100644 --- a/app/core/test_kg_updater.py +++ b/app/core/updater/test_kg_updater.py @@ -7,10 +7,10 @@ SerializedGraph, ) from app.core.async_queue import AsyncQueue -from app.core.kg_repository import KGRepository -from app.core.kg_updater import KGUpdater from app.core.test_graph_fixture import TestGraphFixture from app.core.types import DKGSlice, KGSliceId +from app.core.updater.kg_repository import KGRepository +from app.core.updater.kg_updater import KGUpdater class KGUpdaterTest(TestCase, TestGraphFixture): diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 14c4441..53d1173 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -6,15 +6,15 @@ from loguru import logger from prometheus_client import start_http_server -from app.clients.influxdb.metricstore_client import MetricStoreClient from app.clients.k8s.k8s_client import K8SClient from app.clients.metadata_service.metadata_service_client import MetadataServiceClient from app.core.async_queue import AsyncQueue from app.core.builder.kg_builder import KGBuilder -from app.core.kg_repository import KGRepository -from app.core.kg_updater import KGUpdater -from app.core.metric_repository import MetricRepository +from app.core.repository.metric_repository import MetricRepository +from app.core.repository.metricstore_client import MetricStoreClient from app.core.types import DKGSlice +from app.core.updater.kg_repository import KGRepository +from app.core.updater.kg_updater import KGUpdater from app.kgexporter_settings import KGExporterSettings from app.util.clock import Clock diff --git a/app/transform/metrics/metric_transformer.py b/app/transform/metrics/metric_transformer.py index da3adbd..002fa84 100644 --- a/app/transform/metrics/metric_transformer.py +++ 
b/app/transform/metrics/metric_transformer.py @@ -1,7 +1,7 @@ from typing import List, Tuple -from app.core.metric_repository import MetricQuery -from app.core.metric_value import MetricValue +from app.core.repository.types import MetricQuery +from app.core.types import MetricValue from app.kg.graph import Graph from app.transform.k8s.transformation_context import TransformationContext diff --git a/app/transform/metrics/node_metric_transformer.py b/app/transform/metrics/node_metric_transformer.py index 8e8b9a9..9f4f9ee 100644 --- a/app/transform/metrics/node_metric_transformer.py +++ b/app/transform/metrics/node_metric_transformer.py @@ -1,7 +1,7 @@ from typing import List, Tuple -from app.core.metric_repository import MetricQuery -from app.core.metric_value import MetricValue +from app.core.repository.types import MetricQuery +from app.core.types import MetricValue from app.kg.graph import Graph from app.kg.iri import IRI from app.transform.k8s.transformation_context import TransformationContext diff --git a/app/transform/metrics/pod_metric_transformer.py b/app/transform/metrics/pod_metric_transformer.py index a2ba254..bfb8ccf 100644 --- a/app/transform/metrics/pod_metric_transformer.py +++ b/app/transform/metrics/pod_metric_transformer.py @@ -1,7 +1,7 @@ from typing import List, Tuple -from app.core.metric_repository import MetricQuery -from app.core.metric_value import MetricValue +from app.core.repository.types import MetricQuery +from app.core.types import MetricValue from app.kg.graph import Graph from app.kg.iri import IRI from app.transform.k8s.transformation_context import TransformationContext diff --git a/app/transform/metrics/test_node_metric_transformer.py b/app/transform/metrics/test_node_metric_transformer.py index 61e62c5..072d023 100644 --- a/app/transform/metrics/test_node_metric_transformer.py +++ b/app/transform/metrics/test_node_metric_transformer.py @@ -1,8 +1,8 @@ import json from io import StringIO -from app.core.metric_repository import MetricQuery, ResultParserId -from app.core.metric_value import MetricValue +from app.core.repository.types import MetricQuery, ResultParserId +from app.core.types import MetricValue from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer diff --git a/app/transform/metrics/test_pod_metric_transformer.py b/app/transform/metrics/test_pod_metric_transformer.py index ae4a0a1..185f16a 100644 --- a/app/transform/metrics/test_pod_metric_transformer.py +++ b/app/transform/metrics/test_pod_metric_transformer.py @@ -1,8 +1,8 @@ import json from io import StringIO -from app.core.metric_repository import MetricQuery, ResultParserId -from app.core.metric_value import MetricValue +from app.core.repository.types import MetricQuery, ResultParserId +from app.core.types import MetricValue from app.kg.inmemory_graph import InMemoryGraph from app.serialize.jsonld_serializer import JsonLDSerialializer from app.serialize.turtle_serializer import TurtleSerialializer From 3a126e70954d1eb20478f3ab8ae8159716c5f304 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 28 Jun 2024 13:51:24 +0200 Subject: [PATCH 58/61] HHT-669: refactoring and cleanup --- app/clients/k8s/k8s_client.py | 7 +------ app/core/builder/kg_builder.py | 8 ++++---- .../{single_slice_strategy.py => slice_per_cluster.py} | 2 +- .../{slice_for_node_strategy.py => slice_per_node.py} | 3 +-- ...gle_slice_strategy.py => test_slice_per_cluster.py} | 10 +++++----- 
 ...ice_for_node_strategy.py => test_slice_per_node.py} | 10 +++++-----
 charts/app/values.yaml | 1 -
 etc/config.yaml | 1 -
 8 files changed, 17 insertions(+), 25 deletions(-)
 rename app/core/builder/slice_strategy/{single_slice_strategy.py => slice_per_cluster.py} (95%)
 rename app/core/builder/slice_strategy/{slice_for_node_strategy.py => slice_per_node.py} (98%)
 rename app/core/builder/slice_strategy/{test_single_slice_strategy.py => test_slice_per_cluster.py} (89%)
 rename app/core/builder/slice_strategy/{test_slice_for_node_strategy.py => test_slice_per_node.py} (92%)

diff --git a/app/clients/k8s/k8s_client.py b/app/clients/k8s/k8s_client.py
index e636875..e3200ce 100644
--- a/app/clients/k8s/k8s_client.py
+++ b/app/clients/k8s/k8s_client.py
@@ -3,8 +3,6 @@
 import asyncio
 
 from dataclasses import dataclass, field
-from jsonpath_ng.ext import parse
-
 
 @dataclass
 class ResourceSnapshot:
@@ -54,11 +52,8 @@ def find_resources_by_kind_and_identity(
         else:
             return []
 
-    # TODO remove
     def get_resource_name(self, node: Dict[str, Any]) -> str:
-        for match in parse("$.metadata.name").find(node):
-            return str(match.value)
-        raise Exception("Metadata does not contain name.")
+        return node["metadata"]["name"]  # type: ignore
 
     def get_resources_by_kind(self, kind: str) -> Optional[List[Dict[str, Any]]]:
         if kind == "Pod":
diff --git a/app/core/builder/kg_builder.py b/app/core/builder/kg_builder.py
index 5076853..e4ff08e 100644
--- a/app/core/builder/kg_builder.py
+++ b/app/core/builder/kg_builder.py
@@ -9,8 +9,8 @@
 from app.clients.k8s.k8s_client import K8SClient
 from app.core.async_queue import AsyncQueue
 from app.core.builder.kg_slice_assembler import KGSliceAssembler
-from app.core.builder.slice_strategy.single_slice_strategy import SingleSliceStrategy
-from app.core.builder.slice_strategy.slice_for_node_strategy import SliceForNodeStrategy
+from app.core.builder.slice_strategy.slice_per_cluster import SlicePerCluster
+from app.core.builder.slice_strategy.slice_per_node import SlicePerNode
 from app.core.builder.slice_strategy.slice_strategy import SliceStrategy
 from app.core.repository.metric_repository import MetricRepository
 from app.core.repository.types import MetricQuery
@@ -69,9 +69,9 @@ def __init__(
         self.influxdb_repository = influxdb_repository
         self.settings = settings
         self.slice_strategy = (
-            SingleSliceStrategy(settings.single_slice_url)
+            SlicePerCluster(settings.single_slice_url)
             if settings.is_single_slice
-            else SliceForNodeStrategy(node_port=settings.node_port)
+            else SlicePerNode(node_port=settings.node_port)
         )
         self.slice_assembler = KGSliceAssembler()
diff --git a/app/core/builder/slice_strategy/single_slice_strategy.py b/app/core/builder/slice_strategy/slice_per_cluster.py
similarity index 95%
rename from app/core/builder/slice_strategy/single_slice_strategy.py
rename to app/core/builder/slice_strategy/slice_per_cluster.py
index 6fe74ff..e43aa16 100644
--- a/app/core/builder/slice_strategy/single_slice_strategy.py
+++ b/app/core/builder/slice_strategy/slice_per_cluster.py
@@ -7,7 +7,7 @@
 from app.core.types import KGSliceId, MetricSnapshot, SliceInputs
 
 
-class SingleSliceStrategy(SliceStrategy):
+class SlicePerCluster(SliceStrategy):
     metadata_host: str
     metadata_port: int
diff --git a/app/core/builder/slice_strategy/slice_for_node_strategy.py b/app/core/builder/slice_strategy/slice_per_node.py
similarity index 98%
rename from app/core/builder/slice_strategy/slice_for_node_strategy.py
rename to app/core/builder/slice_strategy/slice_per_node.py
index 3b98177..90e0325 100644
--- a/app/core/builder/slice_strategy/slice_for_node_strategy.py
+++ b/app/core/builder/slice_strategy/slice_per_node.py
@@ -10,8 +10,7 @@
 ReferenceKind: TypeAlias = str
 
 
-# TODO rename
-class SliceForNodeStrategy(SliceStrategy):
+class SlicePerNode(SliceStrategy):
     node_port: int
 
     def __init__(self, node_port: int):
diff --git a/app/core/builder/slice_strategy/test_single_slice_strategy.py b/app/core/builder/slice_strategy/test_slice_per_cluster.py
similarity index 89%
rename from app/core/builder/slice_strategy/test_single_slice_strategy.py
rename to app/core/builder/slice_strategy/test_slice_per_cluster.py
index 1d67bd1..255f712 100644
--- a/app/core/builder/slice_strategy/test_single_slice_strategy.py
+++ b/app/core/builder/slice_strategy/test_slice_per_cluster.py
@@ -1,13 +1,13 @@
 from unittest import TestCase
 
-from app.core.builder.slice_strategy.single_slice_strategy import SingleSliceStrategy
+from app.core.builder.slice_strategy.slice_per_cluster import SlicePerCluster
 from app.core.test_snapshot_base import SnapshotTestBase
 from app.core.types import KGSliceId
 
 
-class SingleSliceStrategyTest(TestCase, SnapshotTestBase):
+class SlicePerClusterTest(TestCase, SnapshotTestBase):
     def test_split_empty(self) -> None:
-        strategy = SingleSliceStrategy(metadata_url="http://metadata-service:80")
+        strategy = SlicePerCluster(metadata_url="http://metadata-service:80")
         resources = self.load_k8s_snapshot("empty")
         metrics = self.load_metric_snapshot("empty")
         actual = strategy.get_slices(resources, metrics)
@@ -22,7 +22,7 @@ def test_split_empty(self) -> None:
         self.assertEqual(actual_metric_names, set())
 
     def test_split_minimal(self) -> None:
-        strategy = SingleSliceStrategy(metadata_url="http://metadata-service:80")
+        strategy = SlicePerCluster(metadata_url="http://metadata-service:80")
         resources = self.load_k8s_snapshot("minimal")
         metrics = self.load_metric_snapshot("minimal")
         actual = strategy.get_slices(resources, metrics)
@@ -58,7 +58,7 @@ def test_split_minimal(self) -> None:
         )
 
     def test_split_multinode(self) -> None:
-        strategy = SingleSliceStrategy(metadata_url="http://metadata-service:80")
+        strategy = SlicePerCluster(metadata_url="http://metadata-service:80")
         resources = self.load_k8s_snapshot("multinode")
         metrics = self.load_metric_snapshot("multinode")
         actual = strategy.get_slices(resources, metrics)
diff --git a/app/core/builder/slice_strategy/test_slice_for_node_strategy.py b/app/core/builder/slice_strategy/test_slice_per_node.py
similarity index 92%
rename from app/core/builder/slice_strategy/test_slice_for_node_strategy.py
rename to app/core/builder/slice_strategy/test_slice_per_node.py
index 678219b..2dec722 100644
--- a/app/core/builder/slice_strategy/test_slice_for_node_strategy.py
+++ b/app/core/builder/slice_strategy/test_slice_per_node.py
@@ -1,20 +1,20 @@
 from unittest import TestCase
 
-from app.core.builder.slice_strategy.slice_for_node_strategy import SliceForNodeStrategy
+from app.core.builder.slice_strategy.slice_per_node import SlicePerNode
 from app.core.test_snapshot_base import SnapshotTestBase
 from app.core.types import KGSliceId
 
 
-class SliceForNodeStrategyTest(TestCase, SnapshotTestBase):
+class SlicePerNodeTest(TestCase, SnapshotTestBase):
     def test_split_empty(self) -> None:
-        strategy = SliceForNodeStrategy(node_port=80)
+        strategy = SlicePerNode(node_port=80)
         resources = self.load_k8s_snapshot("empty")
         metrics = self.load_metric_snapshot("empty")
         actual = strategy.get_slices(resources, metrics)
         self.assertEqual({}, actual)
 
     def test_split_minimal(self) -> None:
-        strategy = SliceForNodeStrategy(node_port=80)
+        strategy = SlicePerNode(node_port=80)
         resources = self.load_k8s_snapshot("minimal")
         metrics = self.load_metric_snapshot("minimal")
         actual = strategy.get_slices(resources, metrics)
@@ -50,7 +50,7 @@ def test_split_minimal(self) -> None:
         )
 
     def test_split_multinode(self) -> None:
-        strategy = SliceForNodeStrategy(node_port=80)
+        strategy = SlicePerNode(node_port=80)
         resources = self.load_k8s_snapshot("multinode")
         metrics = self.load_metric_snapshot("multinode")
         actual = strategy.get_slices(resources, metrics)
diff --git a/charts/app/values.yaml b/charts/app/values.yaml
index 0f22bcf..cac8dca 100644
--- a/charts/app/values.yaml
+++ b/charts/app/values.yaml
@@ -37,7 +37,6 @@ settings:
     queries:
       node_queries: []
      pod_queries: []
-      workload_queries: []
    influxdb:
      org: org
      timeout: 60000
diff --git a/etc/config.yaml b/etc/config.yaml
index 5d53b2c..b2182ff 100644
--- a/etc/config.yaml
+++ b/etc/config.yaml
@@ -6,7 +6,6 @@ builder:
   queries:
     node_queries: []
    pod_queries: []
-    workload_queries: []
  influxdb:
    org: org
    timeout: 60000

From 8c3a186c0b5cea0f8aa23152a6c4ffe16af3d0fb Mon Sep 17 00:00:00 2001
From: ktatarnikov
Date: Tue, 9 Jul 2024 14:03:59 +0200
Subject: [PATCH 59/61] HHT-669: kubernetes async client

---
 app/clients/k8s/k8s_client_impl.py | 82 +++-
 app/transform/k8s/transformer_base.py | 2 +-
 app/transform/k8s/workload_transformer.py | 3 +-
 app/util/quantity.py | 88 ++++
 poetry.lock | 531 ++++++++++++----------
 pyproject.toml | 1 +
 6 files changed, 435 insertions(+), 272 deletions(-)
 create mode 100644 app/util/quantity.py

diff --git a/app/clients/k8s/k8s_client_impl.py b/app/clients/k8s/k8s_client_impl.py
index a652f68..f85ec93 100644
--- a/app/clients/k8s/k8s_client_impl.py
+++ b/app/clients/k8s/k8s_client_impl.py
@@ -1,24 +1,24 @@
-from typing import Any, Dict, List
+from typing import Any, Callable, Coroutine, Dict, List, Optional, TypeVar
 
-from kubernetes import client, config, dynamic
-from kubernetes.client import api_client
+from kubernetes_asyncio import config
+from kubernetes_asyncio.client import ApiClient
+from kubernetes_asyncio.client.api.core_api import CoreApi
+from kubernetes_asyncio.client.configuration import Configuration
+from kubernetes_asyncio.dynamic import DynamicClient
 
 from app.clients.k8s.k8s_client import K8SClient
 from app.clients.k8s.k8s_settings import K8SSettings
 
+T = TypeVar("T")
+
 
 class K8SClientImpl(K8SClient):
-    client: dynamic.DynamicClient
+    settings: K8SSettings
+    configuration: Optional[Configuration]
 
     def __init__(self, settings: K8SSettings):
-        configuration = (
-            config.load_incluster_config()
-            if settings.in_cluster
-            else config.load_kube_config()
-        )
-        self.client = dynamic.DynamicClient(
-            api_client.ApiClient(configuration=configuration)
-        )
+        self.settings = settings
+        self.configuration = None
 
     async def get_nodes(self) -> List[Dict[str, Any]]:
         return await self.get_resource("Node")
@@ -42,20 +42,52 @@ async def get_jobs(self) -> List[Dict[str, Any]]:
         return await self.get_resource("Job")
 
     async def get_cluster_info(self) -> Dict[str, Any]:
-        configmap_api = self.client.resources.get(api_version="v1", kind="ConfigMap")
-        results = configmap_api.get(namespace="kube-system", name="kubeadm-config")
-        if results:
-            return results.to_dict()  # type: ignore
-        else:
-            return {}
+        async def get_cluster_info_internal(
+            dyn_client: DynamicClient,
+        ) -> Dict[str, Any]:
+            configmap_api = await dyn_client.resources.get(
+                api_version="v1", kind="ConfigMap"
+            )
+            results = await configmap_api.get(
+                namespace="kube-system", name="kubeadm-config"
+            )
+            if results:
+                return results.to_dict()  # type: ignore
+            else:
+                return {}
+
+        return await self.execute(get_cluster_info_internal)
 
     async def get_api_versions(self) -> Dict[str, Any]:
-        versions = client.CoreApi().get_api_versions()
-        if versions:
-            return versions.to_dict()  # type: ignore
-        else:
-            return {}
+        async def get_api_versions_internal(
+            dyn_client: DynamicClient,
+        ) -> Dict[str, Any]:
+            versions = await CoreApi(dyn_client.client).get_api_versions()
+            if versions:
+                return versions.to_dict()  # type: ignore
+            else:
+                return {}
+
+        return await self.execute(get_api_versions_internal)
 
     async def get_resource(self, kind: str) -> List[Dict[str, Any]]:
-        api = self.client.resources.get(api_version="v1", kind=kind)
-        return [item.to_dict() for item in api.get().items]
+        async def get_resource_internal(client: DynamicClient) -> List[Dict[str, Any]]:
+            api = await client.resources.get(api_version="v1", kind=kind)
+            result = await api.get()
+            return [item.to_dict() for item in result.items]
+
+        return await self.execute(get_resource_internal)
+
+    async def execute(
+        self, func: Callable[[DynamicClient], Coroutine[Any, Any, T]]
+    ) -> T:
+        if not self.configuration:
+            self.configuration = Configuration()
+            if self.settings.in_cluster:
+                config.load_incluster_config(client_configuration=self.configuration)
+            else:
+                await config.load_kube_config(client_configuration=self.configuration)
+
+        async with ApiClient(configuration=self.configuration) as client_api:
+            async with DynamicClient(client_api) as dynamic_api:
+                return await func(dynamic_api)
diff --git a/app/transform/k8s/transformer_base.py b/app/transform/k8s/transformer_base.py
index d49f723..28f7ab3 100644
--- a/app/transform/k8s/transformer_base.py
+++ b/app/transform/k8s/transformer_base.py
@@ -3,13 +3,13 @@
 import re
 
 from jsonpath_ng.ext import parse
-from kubernetes.utils.quantity import parse_quantity
 
 from app.kg.graph import Graph
 from app.kg.iri import IRI
 from app.kg.literal import Literal
 from app.transform.k8s.transformation_context import TransformationContext
 from app.transform.k8s.upper_ontology_base import UpperOntologyBase
+from app.util.quantity import parse_quantity
 
 
 class TransformerBase:
diff --git a/app/transform/k8s/workload_transformer.py b/app/transform/k8s/workload_transformer.py
index f5faed8..dee1918 100644
--- a/app/transform/k8s/workload_transformer.py
+++ b/app/transform/k8s/workload_transformer.py
@@ -1,12 +1,11 @@
 from typing import Any, Dict, List, Optional
 
-from kubernetes.utils.quantity import parse_quantity
-
 from app.kg.graph import Graph
 from app.kg.iri import IRI
 from app.transform.k8s.transformation_context import TransformationContext
 from app.transform.k8s.transformer_base import TransformerBase
 from app.transform.k8s.upper_ontology_base import UpperOntologyBase
+from app.util.quantity import parse_quantity
 
 
 class WorkloadToRDFTransformer(TransformerBase, UpperOntologyBase):
diff --git a/app/util/quantity.py b/app/util/quantity.py
new file mode 100644
index 0000000..d361fa4
--- /dev/null
+++ b/app/util/quantity.py
@@ -0,0 +1,88 @@
+# This file is copied from https://github.com/kubernetes-client/python/blob/master/kubernetes/utils/quantity.py
+# because kubernetes_asyncio client does not contain this function
+#
+# Copyright 2019 The Kubernetes Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from decimal import Decimal, InvalidOperation
+
+
+def parse_quantity(quantity):
+    """
+    Parse kubernetes canonical form quantity like 200Mi to a decimal number.
+    Supported SI suffixes:
+    base1024: Ki | Mi | Gi | Ti | Pi | Ei
+    base1000: n | u | m | "" | k | M | G | T | P | E
+
+    See https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go
+
+    Input:
+    quantity: string. kubernetes canonical form quantity
+
+    Returns:
+    Decimal
+
+    Raises:
+    ValueError on invalid or unknown input
+    """
+    if isinstance(quantity, (int, float, Decimal)):
+        return Decimal(quantity)
+
+    exponents = {
+        "n": -3,
+        "u": -2,
+        "m": -1,
+        "K": 1,
+        "k": 1,
+        "M": 2,
+        "G": 3,
+        "T": 4,
+        "P": 5,
+        "E": 6,
+    }
+
+    quantity = str(quantity)
+    number = quantity
+    suffix = None
+    if len(quantity) >= 2 and quantity[-1] == "i":
+        if quantity[-2] in exponents:
+            number = quantity[:-2]
+            suffix = quantity[-2:]
+    elif len(quantity) >= 1 and quantity[-1] in exponents:
+        number = quantity[:-1]
+        suffix = quantity[-1:]
+
+    try:
+        number = Decimal(number)
+    except InvalidOperation:
+        raise ValueError("Invalid number format: {}".format(number))
+
+    if suffix is None:
+        return number
+
+    if suffix.endswith("i"):
+        base = 1024
+    elif len(suffix) == 1:
+        base = 1000
+    else:
+        raise ValueError("{} has unknown suffix".format(quantity))
+
+    # handle SI inconsistency
+    if suffix == "ki":
+        raise ValueError("{} has unknown suffix".format(quantity))
+
+    if suffix[0] not in exponents:
+        raise ValueError("{} has unknown suffix".format(quantity))
+
+    exponent = Decimal(exponents[suffix[0]])
+    return number * (base**exponent)
diff --git a/poetry.lock b/poetry.lock
index c9e40a4..993c70d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -236,13 +236,13 @@ files = [
 
 [[package]]
 name = "certifi"
-version = "2024.6.2"
+version = "2024.7.4"
 description = "Python package for providing Mozilla's CA Bundle."
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -525,53 +525,53 @@ pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "fonttools" -version = "4.53.0" +version = "4.53.1" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.53.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20"}, - {file = "fonttools-4.53.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d"}, - {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6"}, - {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5"}, - {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9"}, - {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca"}, - {file = "fonttools-4.53.0-cp310-cp310-win32.whl", hash = "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068"}, - {file = "fonttools-4.53.0-cp310-cp310-win_amd64.whl", hash = "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68"}, - {file = "fonttools-4.53.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec"}, - {file = "fonttools-4.53.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749"}, - {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161"}, - {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee"}, - {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6"}, - {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e"}, - {file = "fonttools-4.53.0-cp311-cp311-win32.whl", hash = "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005"}, - {file = "fonttools-4.53.0-cp311-cp311-win_amd64.whl", hash = "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796"}, - {file = "fonttools-4.53.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a"}, - {file = "fonttools-4.53.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4"}, - {file = 
"fonttools-4.53.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06"}, - {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d"}, - {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109"}, - {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2"}, - {file = "fonttools-4.53.0-cp312-cp312-win32.whl", hash = "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea"}, - {file = "fonttools-4.53.0-cp312-cp312-win_amd64.whl", hash = "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380"}, - {file = "fonttools-4.53.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12"}, - {file = "fonttools-4.53.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4"}, - {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce"}, - {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f"}, - {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206"}, - {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd"}, - {file = "fonttools-4.53.0-cp38-cp38-win32.whl", hash = "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af"}, - {file = "fonttools-4.53.0-cp38-cp38-win_amd64.whl", hash = "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b"}, - {file = "fonttools-4.53.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac"}, - {file = "fonttools-4.53.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d"}, - {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1"}, - {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f"}, - {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64"}, - {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0"}, - {file = "fonttools-4.53.0-cp39-cp39-win32.whl", hash = "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9"}, - {file = "fonttools-4.53.0-cp39-cp39-win_amd64.whl", hash = "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2"}, - {file = "fonttools-4.53.0-py3-none-any.whl", hash = "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4"}, - {file = "fonttools-4.53.0.tar.gz", hash = 
"sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, + {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, + {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, + {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, + {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, + {file = "fonttools-4.53.1-cp312-cp312-win32.whl", 
hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, + {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, + {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, + {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, + {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, + {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, + {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, + {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, ] [package.extras] @@ -676,13 +676,13 @@ files = [ [[package]] name = "google-auth" -version = "2.30.0" +version = "2.32.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"}, - {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"}, + {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, + {file = "google_auth-2.32.0.tar.gz", hash = 
"sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, ] [package.dependencies] @@ -769,13 +769,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "identify" -version = "2.5.36" +version = "2.6.0" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, ] [package.extras] @@ -1007,6 +1007,25 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "kubernetes-asyncio" +version = "30.1.1" +description = "Kubernetes asynchronous python client" +optional = false +python-versions = "*" +files = [ + {file = "kubernetes_asyncio-30.1.1-py3-none-any.whl", hash = "sha256:3bb40d906ba37f5553bbf0ee9b69947bf14b93c481ed69e2a5ab02aa6ded33d7"}, + {file = "kubernetes_asyncio-30.1.1.tar.gz", hash = "sha256:7523f8650bedb0c9cf5264f2b043ee94fab9b0d29a142c63d59d435bd9df66d7"}, +] + +[package.dependencies] +aiohttp = ">=3.9.0,<4.0.0" +certifi = ">=14.05.14" +python-dateutil = ">=2.5.3" +pyyaml = ">=3.12" +six = ">=1.9.0" +urllib3 = ">=1.24.2" + [[package]] name = "loguru" version = "0.7.2" @@ -1027,40 +1046,40 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio [[package]] name = "matplotlib" -version = "3.9.0" +version = "3.9.1" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"}, - {file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"}, - {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"}, - {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"}, - {file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"}, - {file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"}, - {file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"}, - {file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"}, - {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"}, - {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"}, - {file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"}, - {file = "matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"}, - {file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"}, - {file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"}, - {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"}, - {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"}, - {file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"}, - {file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"}, - {file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"}, - {file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"}, - {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"}, - {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"}, - {file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"}, - {file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"}, - {file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"}, + {file = "matplotlib-3.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7ccd6270066feb9a9d8e0705aa027f1ff39f354c72a87efe8fa07632f30fc6bb"}, + {file = "matplotlib-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:591d3a88903a30a6d23b040c1e44d1afdd0d778758d07110eb7596f811f31842"}, + {file = "matplotlib-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2a59ff4b83d33bca3b5ec58203cc65985367812cb8c257f3e101632be86d92"}, + {file = "matplotlib-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fc001516ffcf1a221beb51198b194d9230199d6842c540108e4ce109ac05cc0"}, + {file = "matplotlib-3.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:83c6a792f1465d174c86d06f3ae85a8fe36e6f5964633ae8106312ec0921fdf5"}, + {file = "matplotlib-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:421851f4f57350bcf0811edd754a708d2275533e84f52f6760b740766c6747a7"}, + {file = "matplotlib-3.9.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b3fce58971b465e01b5c538f9d44915640c20ec5ff31346e963c9e1cd66fa812"}, + {file = "matplotlib-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a973c53ad0668c53e0ed76b27d2eeeae8799836fd0d0caaa4ecc66bf4e6676c0"}, + {file = "matplotlib-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd5acf8f3ef43f7532c2f230249720f5dc5dd40ecafaf1c60ac8200d46d7eb"}, + {file = "matplotlib-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab38a4f3772523179b2f772103d8030215b318fef6360cb40558f585bf3d017f"}, + {file = "matplotlib-3.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2315837485ca6188a4b632c5199900e28d33b481eb083663f6a44cfc8987ded3"}, + {file = "matplotlib-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0c977c5c382f6696caf0bd277ef4f936da7e2aa202ff66cad5f0ac1428ee15b"}, + {file = "matplotlib-3.9.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:565d572efea2b94f264dd86ef27919515aa6d629252a169b42ce5f570db7f37b"}, + {file = "matplotlib-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d397fd8ccc64af2ec0af1f0efc3bacd745ebfb9d507f3f552e8adb689ed730a"}, + {file = "matplotlib-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26040c8f5121cd1ad712abffcd4b5222a8aec3a0fe40bc8542c94331deb8780d"}, + {file = "matplotlib-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cb1837cffaac087ad6b44399d5e22b78c729de3cdae4629e252067b705e2b"}, + {file = "matplotlib-3.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0e835c6988edc3d2d08794f73c323cc62483e13df0194719ecb0723b564e0b5c"}, + {file = "matplotlib-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:44a21d922f78ce40435cb35b43dd7d573cf2a30138d5c4b709d19f00e3907fd7"}, + {file = "matplotlib-3.9.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0c584210c755ae921283d21d01f03a49ef46d1afa184134dd0f95b0202ee6f03"}, + {file = "matplotlib-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11fed08f34fa682c2b792942f8902e7aefeed400da71f9e5816bea40a7ce28fe"}, + {file = "matplotlib-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0000354e32efcfd86bda75729716b92f5c2edd5b947200be9881f0a671565c33"}, + {file = "matplotlib-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db17fea0ae3aceb8e9ac69c7e3051bae0b3d083bfec932240f9bf5d0197a049"}, + {file = "matplotlib-3.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:208cbce658b72bf6a8e675058fbbf59f67814057ae78165d8a2f87c45b48d0ff"}, + {file = "matplotlib-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:dc23f48ab630474264276be156d0d7710ac6c5a09648ccdf49fef9200d8cbe80"}, + {file = "matplotlib-3.9.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3fda72d4d472e2ccd1be0e9ccb6bf0d2eaf635e7f8f51d737ed7e465ac020cb3"}, + {file = "matplotlib-3.9.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:84b3ba8429935a444f1fdc80ed930babbe06725bcf09fbeb5c8757a2cd74af04"}, + {file = "matplotlib-3.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b918770bf3e07845408716e5bbda17eadfc3fcbd9307dc67f37d6cf834bb3d98"}, + {file = "matplotlib-3.9.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:f1f2e5d29e9435c97ad4c36fb6668e89aee13d48c75893e25cef064675038ac9"}, + {file = "matplotlib-3.9.1.tar.gz", hash = "sha256:de06b19b8db95dd33d0dc17c926c7c9ebed9f572074b6fac4f65068a6814d010"}, ] [package.dependencies] @@ -1421,84 +1440,95 @@ files = [ [[package]] name = "pillow" -version = "10.3.0" +version = "10.4.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, - {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, - {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, - {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, - {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, - {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, - {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, - {file = 
"pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, - {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, - {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, - {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, - {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, - {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, - {file = 
"pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, - {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, - {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, - {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, - {file = "pillow-10.3.0.tar.gz", 
hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = 
"pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -1617,109 +1647,122 @@ files = [ [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = 
"sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = 
"pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = 
"pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = 
"pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -2077,18 +2120,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "70.1.1" +version = "70.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, - {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, + {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, + {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff 
(>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2443,4 +2486,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "729520443c40116949aa6713601de92af4fd5ae6c7dad9c729705c4f7cab6b24" +content-hash = "74388b3ae0459374c608e2e4b4ca682d7830b351609322453be4c51bbe38a93f" diff --git a/pyproject.toml b/pyproject.toml index f477969..8d5d4b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ aiohttp = "3.9.3" loguru = "0.7.2" prometheus-client = "0.20.0" aioprometheus-api-client = "0.0.2" +kubernetes_asyncio = "30.1.1" [tool.poetry.group.dev.dependencies] black = "^23.12" From c32e41283eaa5f5200b009e0599b52bfc0e1531d Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Tue, 9 Jul 2024 16:01:00 +0200 Subject: [PATCH 60/61] HHT-669: using prometheus as metric store --- app/clients/influxdb/__init__.py | 0 app/clients/influxdb/influxdb_client_impl.py | 40 --------------- app/clients/influxdb/influxdb_settings.py | 8 --- app/clients/influxdb/mock_infuxdbclient.py | 18 ------- app/clients/influxdb/simple_result_parser.py | 21 -------- .../influxdb/test_simple_result_parser.py | 50 ------------------- .../snapshot/minimal/metric_nodes.yaml | 8 +-- .../snapshot/minimal/metric_pods.yaml | 8 +-- .../snapshot/multinode/metric_nodes.yaml | 18 +++---- .../snapshot/multinode/metric_pods.yaml | 32 ++++++------ app/core/builder/kg_builder.py | 31 +++--------- app/core/builder/test_kg_builder.py | 13 ++--- app/core/repository/metric_repository.py | 26 +++++++--- app/core/repository/query_settings.py | 10 ++++ app/core/repository/test_metric_repository.py | 20 ++++---- app/core/repository/test_result_parser_id.py | 10 ++++ app/core/repository/types.py | 10 ++-- app/core/test_snapshot_base.py | 12 ++--- app/kgexporter_settings.py | 2 - app/test_kgexporter_context.py | 19 +++---- app/test_pydantic_yaml.py | 7 +-- .../metrics/test_node_metric_transformer.py | 4 +- .../metrics/test_pod_metric_transformer.py | 4 +- charts/app/values.yaml | 5 -- etc/config.yaml | 17 ++++--- 25 files changed, 133 insertions(+), 260 deletions(-) delete mode 100644 app/clients/influxdb/__init__.py delete mode 100644 app/clients/influxdb/influxdb_client_impl.py delete mode 100644 app/clients/influxdb/influxdb_settings.py delete mode 100644 app/clients/influxdb/mock_infuxdbclient.py delete mode 100644 app/clients/influxdb/simple_result_parser.py delete mode 100644 app/clients/influxdb/test_simple_result_parser.py create mode 100644 app/core/repository/query_settings.py create mode 100644 
app/core/repository/test_result_parser_id.py diff --git a/app/clients/influxdb/__init__.py b/app/clients/influxdb/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/clients/influxdb/influxdb_client_impl.py b/app/clients/influxdb/influxdb_client_impl.py deleted file mode 100644 index 0cfc8fb..0000000 --- a/app/clients/influxdb/influxdb_client_impl.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import List - -from influxdb_client.client.flux_table import TableList -from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync - -from app.clients.influxdb.influxdb_settings import InfluxDBSettings -from app.core.repository.metricstore_client import MetricStoreClient -from app.core.repository.query_result_parser import QueryResultParser -from app.core.types import MetricValue - - -class InfluxDBClientImpl(MetricStoreClient): - settings: InfluxDBSettings - async_client: InfluxDBClientAsync - - def __init__(self, settings: InfluxDBSettings): - self.settings = settings - - async def query( - self, query: str, result_parser: QueryResultParser - ) -> List[MetricValue]: - async with InfluxDBClientAsync( - url=self.settings.url, - token=self.settings.token, - org=self.settings.org, - timeout=self.settings.timeout, - ) as async_client: - flux_result = await async_client.query_api().query(query) - return self.parse_response(flux_result, result_parser) - - def parse_response( - self, - flux_result: TableList, - result_parser: QueryResultParser, - ) -> List[MetricValue]: - query_results = [] - for table in flux_result: - for record in table.records: - query_results.extend(result_parser.parse(record)) - return query_results diff --git a/app/clients/influxdb/influxdb_settings.py b/app/clients/influxdb/influxdb_settings.py deleted file mode 100644 index 2f82674..0000000 --- a/app/clients/influxdb/influxdb_settings.py +++ /dev/null @@ -1,8 +0,0 @@ -from pydantic_settings import BaseSettings - - -class InfluxDBSettings(BaseSettings): - url: str - token: str - org: str - timeout: int diff --git a/app/clients/influxdb/mock_infuxdbclient.py b/app/clients/influxdb/mock_infuxdbclient.py deleted file mode 100644 index b24cb45..0000000 --- a/app/clients/influxdb/mock_infuxdbclient.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Dict, List - -from app.core.repository.metricstore_client import MetricStoreClient -from app.core.repository.query_result_parser import QueryResultParser -from app.core.types import MetricValue - - -class MockInfluxDBClient(MetricStoreClient): - results: Dict[str, List[MetricValue]] - - def __init__(self): - self.results = dict() - - def mock_query(self, query: str, client_response: List[MetricValue]) -> None: - self.results[query] = client_response - - async def query(self, query: str, _: QueryResultParser) -> List[MetricValue]: - return self.results.get(query) or [] diff --git a/app/clients/influxdb/simple_result_parser.py b/app/clients/influxdb/simple_result_parser.py deleted file mode 100644 index 9b32498..0000000 --- a/app/clients/influxdb/simple_result_parser.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Any, Dict, List - -from app.core.repository.query_result_parser import QueryResultParser -from app.core.types import MetricValue - - -class SimpleResultParser(QueryResultParser): - METRICID_FIELD: str = "metric_id" - RESOURCEID_FIELD: str = "resource_id" - TIMESTAMP_FIELD: str = "timestamp" - VALUE_FIELD: str = "value" - - def parse(self, row: Dict[str, Any]) -> List[MetricValue]: - return [ - MetricValue( - row[self.METRICID_FIELD], 
- row[self.RESOURCEID_FIELD], - self.get_timestamp(row[self.TIMESTAMP_FIELD]), - self.get_float(row[self.VALUE_FIELD]), - ) - ] diff --git a/app/clients/influxdb/test_simple_result_parser.py b/app/clients/influxdb/test_simple_result_parser.py deleted file mode 100644 index 2a03d02..0000000 --- a/app/clients/influxdb/test_simple_result_parser.py +++ /dev/null @@ -1,50 +0,0 @@ -import datetime -from unittest import TestCase - -from dateutil.tz import tzutc - -from app.clients.influxdb.simple_result_parser import SimpleResultParser -from app.core.types import MetricValue - - -class SimpleResultParserTest(TestCase): - def test_parse(self) -> None: - parser = SimpleResultParser() - - row = { - "result": "_result", - "table": 0, - "_start": datetime.datetime(2024, 5, 31, 7, 53, 2, 452746, tzinfo=tzutc()), - "_stop": datetime.datetime(2024, 5, 31, 8, 13, 2, 452746, tzinfo=tzutc()), - "timestamp": datetime.datetime(2024, 5, 31, 8, 0, tzinfo=tzutc()), - "value": 26237685760.0, - "_field": "node_memory_MemFree_bytes", - "_measurement": "prometheus_remote_write", - "app_kubernetes_io_component": "metrics", - "app_kubernetes_io_instance": "monitoring-stack", - "app_kubernetes_io_managed_by": "Helm", - "app_kubernetes_io_name": "prometheus-node-exporter", - "app_kubernetes_io_part_of": "prometheus-node-exporter", - "app_kubernetes_io_version": "1.7.0", - "helm_sh_chart": "prometheus-node-exporter-4.25.0", - "host": "telegraf-polling-service", - "instance": "10.14.1.160:9100", - "job": "kubernetes-service-endpoints", - "namespace": "monitoring", - "resource_id": "glaciation-testm1w5-master01", - "metric_id": "RAM.Capacity", - "service": "monitoring-stack-prometheus-node-exporter", - } - actual = parser.parse(row) - - self.assertEqual( - [ - MetricValue( - "RAM.Capacity", - "glaciation-testm1w5-master01", - 1717142400000, - 26237685760.0, - ) - ], - actual, - ) diff --git a/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml b/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml index 8ac25ba..fcd38e0 100644 --- a/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml +++ b/app/core/__fixture__/snapshot/minimal/metric_nodes.yaml @@ -5,7 +5,7 @@ unit: coreseconds property: CPU.Usage query: test1 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: cpu_usage resource_id: glaciation-test-master01 @@ -18,7 +18,7 @@ unit: bytes property: RAM.Usage query: test2 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: ram_usage resource_id: glaciation-test-master01 @@ -31,7 +31,7 @@ unit: bytes property: Storage.Usage query: test3 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: eph_usage resource_id: glaciation-test-master01 @@ -44,7 +44,7 @@ unit: bytes property: Network.Usage query: test4 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: net_usage resource_id: glaciation-test-master01 diff --git a/app/core/__fixture__/snapshot/minimal/metric_pods.yaml b/app/core/__fixture__/snapshot/minimal/metric_pods.yaml index c54a9a9..95e0bb3 100644 --- a/app/core/__fixture__/snapshot/minimal/metric_pods.yaml +++ b/app/core/__fixture__/snapshot/minimal/metric_pods.yaml @@ -5,7 +5,7 @@ unit: coreseconds property: CPU.Usage query: test5 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_cpu_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -18,7 +18,7 @@ unit: bytes property: RAM.Usage query: test6 - result_parser: 
SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_ram_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -31,7 +31,7 @@ unit: bytes property: Storage.Usage query: test7 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_eph_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -44,7 +44,7 @@ unit: bytes property: Network.Usage query: test8 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_net_usage resource_id: kube-system.coredns-787d4945fb-l85r5 diff --git a/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml b/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml index 4ec62d0..a6fc819 100644 --- a/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml +++ b/app/core/__fixture__/snapshot/multinode/metric_nodes.yaml @@ -5,7 +5,7 @@ unit: coreseconds property: CPU.Usage query: test1 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: cpu_usage resource_id: glaciation-test-master01 @@ -18,7 +18,7 @@ unit: coreseconds property: GPU.Usage query: test2 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: gpu_usage resource_id: glaciation-test-master01 @@ -31,7 +31,7 @@ unit: bytes property: RAM.Usage query: test3 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: ram_usage resource_id: glaciation-test-master01 @@ -44,7 +44,7 @@ unit: bytes property: Storage.Usage query: test4 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: eph_usage resource_id: glaciation-test-master01 @@ -57,7 +57,7 @@ unit: bytes5 property: Network.Usage query: test - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: net_usage resource_id: glaciation-test-master01 @@ -70,7 +70,7 @@ unit: coreseconds property: CPU.Usage query: test6 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: cpu_usage resource_id: glaciation-test-worker01 @@ -83,7 +83,7 @@ unit: bytes property: RAM.Usage query: test7 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: ram_usage resource_id: glaciation-test-worker01 @@ -96,7 +96,7 @@ unit: bytes property: Storage.Usage query: test8 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: eph_usage resource_id: glaciation-test-worker01 @@ -109,7 +109,7 @@ unit: bytes9 property: Network.Usage query: test - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: net_usage resource_id: glaciation-test-worker01 diff --git a/app/core/__fixture__/snapshot/multinode/metric_pods.yaml b/app/core/__fixture__/snapshot/multinode/metric_pods.yaml index 52eaaf2..caa8859 100644 --- a/app/core/__fixture__/snapshot/multinode/metric_pods.yaml +++ b/app/core/__fixture__/snapshot/multinode/metric_pods.yaml @@ -6,7 +6,7 @@ unit: coreseconds property: CPU.Usage query: pods1 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_cpu_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -19,7 +19,7 @@ unit: bytes property: RAM.Usage query: pods2 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_ram_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -32,7 +32,7 @@ unit: bytes property: Storage.Usage query: pods3 - result_parser: SimpleResultParser + result_parser: 
PROMETHEUS_PARSER value: metric_id: pod_eph_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -45,7 +45,7 @@ unit: bytes property: Network.Usage query: pods4 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_net_usage resource_id: kube-system.coredns-787d4945fb-l85r5 @@ -59,7 +59,7 @@ unit: coreseconds property: CPU.Usage query: pods5 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_cpu_usage resource_id: kube-flannel.kube-flannel-ds-848v8 @@ -72,7 +72,7 @@ unit: bytes property: RAM.Usage query: pods6 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_ram_usage resource_id: kube-flannel.kube-flannel-ds-848v8 @@ -85,7 +85,7 @@ unit: bytes property: Storage.Usage query: pods7 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_eph_usage resource_id: kube-flannel.kube-flannel-ds-848v8 @@ -98,7 +98,7 @@ unit: bytes property: Network.Usage query: pods8 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_net_usage resource_id: kube-flannel.kube-flannel-ds-848v8 @@ -112,7 +112,7 @@ unit: coreseconds property: CPU.Usage query: pods9 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_cpu_usage resource_id: vault.init-vault-cluster-cbqhq @@ -125,7 +125,7 @@ unit: bytes property: RAM.Usage query: pods10 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_ram_usage resource_id: vault.init-vault-cluster-cbqhq @@ -138,7 +138,7 @@ unit: bytes property: Storage.Usage query: pods11 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_eph_usage resource_id: vault.init-vault-cluster-cbqhq @@ -151,7 +151,7 @@ unit: bytes property: Network.Usage query: pods12 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_net_usage resource_id: vault.init-vault-cluster-cbqhq @@ -165,7 +165,7 @@ unit: coreseconds property: CPU.Usage query: pods13 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_cpu_usage resource_id: minio-tenant.glaciation-pool-0-0 @@ -178,7 +178,7 @@ unit: bytes property: RAM.Usage query: pods14 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_ram_usage resource_id: minio-tenant.glaciation-pool-0-0 @@ -191,7 +191,7 @@ unit: bytes property: Storage.Usage query: pods15 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_eph_usage resource_id: minio-tenant.glaciation-pool-0-0 @@ -204,7 +204,7 @@ unit: bytes property: Network.Usage query: pods16 - result_parser: SimpleResultParser + result_parser: PROMETHEUS_PARSER value: metric_id: pod_net_usage resource_id: minio-tenant.glaciation-pool-0-0 diff --git a/app/core/builder/kg_builder.py b/app/core/builder/kg_builder.py index e4ff08e..86a6c4d 100644 --- a/app/core/builder/kg_builder.py +++ b/app/core/builder/kg_builder.py @@ -1,5 +1,3 @@ -from typing import List - import asyncio from loguru import logger @@ -13,17 +11,12 @@ from app.core.builder.slice_strategy.slice_per_node import SlicePerNode from app.core.builder.slice_strategy.slice_strategy import SliceStrategy from app.core.repository.metric_repository import MetricRepository -from app.core.repository.types import MetricQuery -from app.core.types import DKGSlice, MetricSnapshot +from 
app.core.repository.query_settings import QuerySettings +from app.core.types import DKGSlice from app.core.updater.kg_repository import KGRepository from app.util.clock import Clock -class QuerySettings(BaseSettings): - pod_queries: List[MetricQuery] = [] - node_queries: List[MetricQuery] = [] - - class KGBuilderSettings(BaseSettings): builder_tick_seconds: int node_port: int @@ -58,7 +51,7 @@ def __init__( queue: AsyncQueue[DKGSlice], k8s_client: K8SClient, kg_repository: KGRepository, - influxdb_repository: MetricRepository, + metric_repository: MetricRepository, settings: KGBuilderSettings, ): self.terminated = terminated @@ -66,7 +59,7 @@ def __init__( self.k8s_client = k8s_client self.queue = queue self.kg_repository = kg_repository - self.influxdb_repository = influxdb_repository + self.metric_repository = metric_repository self.settings = settings self.slice_strategy = ( SlicePerCluster(settings.single_slice_url) @@ -100,21 +93,11 @@ async def run(self) -> None: async def run_cycle(self, now_seconds: int) -> None: now = now_seconds * 1000 - ( - cluster_snapshot, - pod_metrics, - node_metrics, - ) = await asyncio.gather( + (cluster_snapshot, metric_snapshot) = await asyncio.gather( self.k8s_client.fetch_snapshot(), - self.influxdb_repository.query_many(now, self.settings.queries.pod_queries), - self.influxdb_repository.query_many( - now, self.settings.queries.node_queries - ), - ) - metric_snapshot = MetricSnapshot( - list(zip(self.settings.queries.pod_queries, pod_metrics)), - list(zip(self.settings.queries.node_queries, node_metrics)), + self.metric_repository.metric_snapshot(now, self.settings.queries), ) + logger.debug("Cluster snapshot: {size}", size=len(cluster_snapshot.cluster)) logger.debug("Nodes: {size}", size=len(cluster_snapshot.nodes)) logger.debug("Pods: {size}", size=len(cluster_snapshot.pods)) diff --git a/app/core/builder/test_kg_builder.py b/app/core/builder/test_kg_builder.py index afd6741..4255106 100644 --- a/app/core/builder/test_kg_builder.py +++ b/app/core/builder/test_kg_builder.py @@ -2,14 +2,15 @@ import datetime from unittest import TestCase -from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient from app.clients.k8s.mock_k8s_client import MockK8SClient from app.clients.metadata_service.mock_metadata_service_client import ( MockMetadataServiceClient, ) +from app.clients.prometheus.mock_prometheus_client import MockPrometheusClient from app.core.async_queue import AsyncQueue -from app.core.builder.kg_builder import KGBuilder, KGBuilderSettings, QuerySettings +from app.core.builder.kg_builder import KGBuilder, KGBuilderSettings from app.core.repository.metric_repository import MetricRepository +from app.core.repository.query_settings import QuerySettings from app.core.test_graph_fixture import TestGraphFixture from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import DKGSlice, KGSliceId @@ -23,7 +24,7 @@ class KGBuilderTest(TestCase, TestGraphFixture, SnapshotTestBase): clock: Clock client: MockMetadataServiceClient k8s_client: MockK8SClient - influxdb_client: MockInfluxDBClient + metric_client: MockPrometheusClient queue: AsyncQueue[DKGSlice] running_event: asyncio.Event runner: asyncio.Runner @@ -33,7 +34,7 @@ def setUp(self) -> None: self.maxDiff = None self.clock = MockClock() self.client = MockMetadataServiceClient() - self.influxdb_client = MockInfluxDBClient() + self.metric_client = MockPrometheusClient() self.queue = AsyncQueue() self.k8s_client = MockK8SClient() self.running_event = asyncio.Event() @@ -48,7 
+49,7 @@ def setUp(self) -> None: def test_build_minimal(self) -> None: self.mock_inputs( - "minimal", self.k8s_client, self.influxdb_client, self.settings.queries + "minimal", self.k8s_client, self.metric_client, self.settings.queries ) builder = self.create_builder() @@ -63,7 +64,7 @@ def test_build_minimal(self) -> None: def create_builder(self) -> KGBuilder: repository = KGRepository(self.client) - influxdb_repository = MetricRepository(self.influxdb_client) + influxdb_repository = MetricRepository(self.metric_client) return KGBuilder( self.running_event, self.clock, diff --git a/app/core/repository/metric_repository.py b/app/core/repository/metric_repository.py index 5c1a97a..851771f 100644 --- a/app/core/repository/metric_repository.py +++ b/app/core/repository/metric_repository.py @@ -1,10 +1,11 @@ -from typing import List +from typing import List, Tuple import asyncio from app.core.repository.metricstore_client import MetricStoreClient +from app.core.repository.query_settings import QuerySettings from app.core.repository.types import MetricQuery -from app.core.types import MetricValue +from app.core.types import MetricSnapshot, MetricValue class MetricRepository: @@ -13,13 +14,26 @@ class MetricRepository: def __init__(self, client: MetricStoreClient): self.client = client + async def metric_snapshot(self, now: int, queries: QuerySettings) -> MetricSnapshot: + (pod_metrics, node_metrics) = await asyncio.gather( + self.query_many(now, queries.pod_queries), + self.query_many(now, queries.node_queries), + ) + return MetricSnapshot(pod_metrics, node_metrics) + async def query_many( self, now: int, queries: List[MetricQuery] - ) -> List[MetricValue]: + ) -> List[Tuple[MetricQuery, MetricValue]]: query_futures = [self.query_one(now, query) for query in queries] - query_results: List[List[MetricValue]] = await asyncio.gather(*query_futures) + query_results: List[ + List[Tuple[MetricQuery, MetricValue]] + ] = await asyncio.gather(*query_futures) return [element for elements in query_results for element in elements] - async def query_one(self, now: int, query: MetricQuery) -> List[MetricValue]: + async def query_one( + self, now: int, query: MetricQuery + ) -> List[Tuple[MetricQuery, MetricValue]]: result_parser = query.result_parser.get_by_name() - return await self.client.query(query.query, result_parser) + query_str = query.query.replace("{{now}}", str(now)) + results = await self.client.query(query_str, result_parser) + return [(query, result) for result in results] diff --git a/app/core/repository/query_settings.py b/app/core/repository/query_settings.py new file mode 100644 index 0000000..bdd5847 --- /dev/null +++ b/app/core/repository/query_settings.py @@ -0,0 +1,10 @@ +from typing import List + +from pydantic_settings import BaseSettings + +from app.core.repository.types import MetricQuery + + +class QuerySettings(BaseSettings): + pod_queries: List[MetricQuery] = [] + node_queries: List[MetricQuery] = [] diff --git a/app/core/repository/test_metric_repository.py b/app/core/repository/test_metric_repository.py index 789a272..dfa96e6 100644 --- a/app/core/repository/test_metric_repository.py +++ b/app/core/repository/test_metric_repository.py @@ -1,18 +1,18 @@ import asyncio from unittest import TestCase -from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient +from app.clients.prometheus.mock_prometheus_client import MockPrometheusClient from app.core.repository.metric_repository import MetricRepository from app.core.repository.types import MetricQuery, ResultParserId 
from app.core.types import MetricValue class MetricRepositoryTest(TestCase): - client: MockInfluxDBClient + client: MockPrometheusClient repository: MetricRepository def setUp(self) -> None: - self.client = MockInfluxDBClient() + self.client = MockPrometheusClient() self.repository = MetricRepository(self.client) def test_query_one(self) -> None: @@ -26,11 +26,11 @@ def test_query_one(self) -> None: query="test_query", unit="bytes", property="property", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ) actual = asyncio.run(self.repository.query_one(now, query)) - self.assertEqual([expected], actual) + self.assertEqual([(query, expected)], actual) def test_query_many(self) -> None: expected1 = MetricValue("id1", "pod1", 100500, 41.0) @@ -47,7 +47,7 @@ def test_query_many(self) -> None: unit="bytes", property="property", query="test_query1", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ) query2 = MetricQuery( measurement_id="measurement", @@ -56,7 +56,7 @@ def test_query_many(self) -> None: query="test_query2", unit="bytes", property="property", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ) query3 = MetricQuery( measurement_id="measurement", @@ -65,8 +65,10 @@ def test_query_many(self) -> None: query="test_query3", unit="bytes", property="property", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ) actual = asyncio.run(self.repository.query_many(now, [query1, query2, query3])) - self.assertEqual([expected1, expected2, expected3], actual) + self.assertEqual( + [(query1, expected1), (query2, expected2), (query3, expected3)], actual + ) diff --git a/app/core/repository/test_result_parser_id.py b/app/core/repository/test_result_parser_id.py new file mode 100644 index 0000000..b1ad8c8 --- /dev/null +++ b/app/core/repository/test_result_parser_id.py @@ -0,0 +1,10 @@ +from unittest import TestCase + +from app.core.repository.types import ResultParserId + + +class ResultParserIdTest(TestCase): + def test_get_by_name(self) -> None: + parser = ResultParserId.PROMETHEUS_PARSER.get_by_name() + + self.assertEqual(type(parser).__name__, "PrometheusResultParser") diff --git a/app/core/repository/types.py b/app/core/repository/types.py index 91fe045..8118972 100644 --- a/app/core/repository/types.py +++ b/app/core/repository/types.py @@ -6,13 +6,13 @@ class ResultParserId(StrEnum): - SIMPLE_RESULT_PARSER = ( - "app.clients.influxdb.simple_result_parser.SimpleResultParser" - ) + PROMETHEUS_PARSER = "PROMETHEUS_PARSER" def get_by_name(self) -> Any: - if self.name == "SIMPLE_RESULT_PARSER": - return self.instantiate(self.value) + if self.name == ResultParserId.PROMETHEUS_PARSER: + return self.instantiate( + "app.clients.prometheus.prometheus_result_parser.PrometheusResultParser" + ) else: raise Exception(f"Unknown parser for {self}") diff --git a/app/core/test_snapshot_base.py b/app/core/test_snapshot_base.py index 146781e..0c5f9ea 100644 --- a/app/core/test_snapshot_base.py +++ b/app/core/test_snapshot_base.py @@ -7,10 +7,10 @@ import yaml -from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient from app.clients.k8s.k8s_client import ResourceSnapshot from app.clients.k8s.mock_k8s_client import MockK8SClient -from app.core.builder.kg_builder import QuerySettings +from app.clients.prometheus.mock_prometheus_client import MockPrometheusClient +from 
app.core.repository.query_settings import QuerySettings from app.core.repository.types import MetricQuery, ResultParserId from app.core.types import KGSliceId, MetricSnapshot, MetricValue, SliceInputs from app.kg.graph import Graph @@ -27,7 +27,7 @@ def mock_inputs( self, identity: str, k8s_client: MockK8SClient, - influxdb_client: MockInfluxDBClient, + metric_client: MockPrometheusClient, settings: QuerySettings, ) -> None: resources = self.load_k8s_snapshot(identity) @@ -44,11 +44,11 @@ def mock_inputs( metrics = self.load_metric_snapshot(identity) for query, value in metrics.node_metrics: - influxdb_client.mock_query(query.query, [value]) + metric_client.mock_query(query.query, [value]) settings.node_queries.append(query) for query, value in metrics.pod_metrics: - influxdb_client.mock_query(query.query, [value]) + metric_client.mock_query(query.query, [value]) settings.pod_queries.append(query) def get_inputs(self, identity: str) -> SliceInputs: @@ -94,7 +94,7 @@ def load_metrics( result = [] for query_and_value in query_and_values: query = self.dataclass_from_dict(MetricQuery, query_and_value["query"]) - query.result_parser = ResultParserId.SIMPLE_RESULT_PARSER # TODO parse + query.result_parser = ResultParserId.PROMETHEUS_PARSER # TODO parse value = self.dataclass_from_dict(MetricValue, query_and_value["value"]) result.append((query, value)) return result diff --git a/app/kgexporter_settings.py b/app/kgexporter_settings.py index e97f4c5..615f439 100644 --- a/app/kgexporter_settings.py +++ b/app/kgexporter_settings.py @@ -1,6 +1,5 @@ from pydantic_settings import BaseSettings -from app.clients.influxdb.influxdb_settings import InfluxDBSettings from app.clients.k8s.k8s_settings import K8SSettings from app.clients.metadata_service.metadata_service_settings import ( MetadataServiceSettings, @@ -16,7 +15,6 @@ class PrometheusSettings(BaseSettings): class KGExporterSettings(BaseSettings): builder: KGBuilderSettings k8s: K8SSettings - influxdb: InfluxDBSettings metadata: MetadataServiceSettings prometheus_client: PrometheusClientSettings prometheus: PrometheusSettings diff --git a/app/test_kgexporter_context.py b/app/test_kgexporter_context.py index 7d6aa6c..12ec064 100644 --- a/app/test_kgexporter_context.py +++ b/app/test_kgexporter_context.py @@ -2,8 +2,6 @@ from unittest import TestCase -from app.clients.influxdb.influxdb_settings import InfluxDBSettings -from app.clients.influxdb.mock_infuxdbclient import MockInfluxDBClient from app.clients.k8s.k8s_settings import K8SSettings from app.clients.k8s.mock_k8s_client import MockK8SClient from app.clients.metadata_service.metadata_service_settings import ( @@ -13,8 +11,10 @@ MockMetadataServiceClient, SerializedGraph, ) +from app.clients.prometheus.mock_prometheus_client import MockPrometheusClient from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings -from app.core.builder.kg_builder import KGBuilderSettings, QuerySettings +from app.core.builder.kg_builder import KGBuilderSettings +from app.core.repository.query_settings import QuerySettings from app.core.test_snapshot_base import SnapshotTestBase from app.core.types import KGSliceId from app.kgexporter_context import KGExporterContext @@ -28,7 +28,7 @@ class KGExporterContextTest(TestCase, SnapshotTestBase): clock: Clock metadata_client: MockMetadataServiceClient k8s_client: MockK8SClient - influxdb_client: MockInfluxDBClient + metric_client: MockPrometheusClient jsonld_config: JsonLDConfiguration settings: KGExporterSettings context: KGExporterContext @@ 
-37,13 +37,13 @@ def setUp(self) -> None: self.clock = MockClock() self.metadata_client = MockMetadataServiceClient() self.k8s_client = MockK8SClient() - self.influxdb_client = MockInfluxDBClient() + self.metric_client = MockPrometheusClient() self.settings = self.test_kg_exporter_settings() self.context = KGExporterContext( self.clock, self.metadata_client, self.k8s_client, - self.influxdb_client, + self.metric_client, self.settings, ) @@ -51,7 +51,7 @@ def test_end_to_end_minimal(self) -> None: self.mock_inputs( "minimal", self.k8s_client, - self.influxdb_client, + self.metric_client, self.settings.builder.queries, ) self.context.start() @@ -67,7 +67,7 @@ def test_end_to_end_multinode(self) -> None: self.mock_inputs( "multinode", self.k8s_client, - self.influxdb_client, + self.metric_client, self.settings.builder.queries, ) self.context.start() @@ -96,9 +96,6 @@ def test_kg_exporter_settings(self) -> KGExporterSettings: single_slice_url="http://metadata-service:80", ), k8s=K8SSettings(in_cluster=True), - influxdb=InfluxDBSettings( - url="test", token="token", org="org", timeout=60000 - ), metadata=MetadataServiceSettings(), prometheus=PrometheusSettings(endpoint_port=8080), prometheus_client=PrometheusClientSettings(url="prometheus.integration"), diff --git a/app/test_pydantic_yaml.py b/app/test_pydantic_yaml.py index 72d9608..77faa49 100644 --- a/app/test_pydantic_yaml.py +++ b/app/test_pydantic_yaml.py @@ -1,13 +1,13 @@ from tempfile import TemporaryDirectory from unittest import TestCase -from app.clients.influxdb.influxdb_settings import InfluxDBSettings from app.clients.k8s.k8s_settings import K8SSettings from app.clients.metadata_service.metadata_service_settings import ( MetadataServiceSettings, ) from app.clients.prometheus.prometheus_client_settings import PrometheusClientSettings -from app.core.builder.kg_builder import KGBuilderSettings, QuerySettings +from app.core.builder.kg_builder import KGBuilderSettings +from app.core.repository.query_settings import QuerySettings from app.kgexporter_settings import KGExporterSettings, PrometheusSettings from app.pydantic_yaml import from_yaml, to_yaml @@ -23,9 +23,6 @@ def test_dump_load_settings(self): single_slice_url="metadata-service:80", ), k8s=K8SSettings(in_cluster=True), - influxdb=InfluxDBSettings( - url="test", token="token", org="org", timeout=60000 - ), metadata=MetadataServiceSettings(), prometheus=PrometheusSettings(endpoint_port=8080), prometheus_client=PrometheusClientSettings(url="prometheus.integration"), diff --git a/app/transform/metrics/test_node_metric_transformer.py b/app/transform/metrics/test_node_metric_transformer.py index 072d023..f67de30 100644 --- a/app/transform/metrics/test_node_metric_transformer.py +++ b/app/transform/metrics/test_node_metric_transformer.py @@ -21,7 +21,7 @@ class NodeMetricToGraphTransformerTest(MetricTransformTestBase): unit="coreseconds", property="CPU.Usage", query="query", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ), MetricValue( metric_id="my_metric", @@ -38,7 +38,7 @@ class NodeMetricToGraphTransformerTest(MetricTransformTestBase): unit="milliwatt", property="Energy.Usage", query="query", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ), MetricValue( metric_id="my_metric", diff --git a/app/transform/metrics/test_pod_metric_transformer.py b/app/transform/metrics/test_pod_metric_transformer.py index 185f16a..76c878e 100644 --- 
a/app/transform/metrics/test_pod_metric_transformer.py +++ b/app/transform/metrics/test_pod_metric_transformer.py @@ -21,7 +21,7 @@ class PodMetricToGraphTransformerTest(MetricTransformTestBase): unit="coreseconds", property="CPU.Usage", query="query", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ), MetricValue( metric_id="my_metric", @@ -38,7 +38,7 @@ class PodMetricToGraphTransformerTest(MetricTransformTestBase): unit="milliwatt", property="Energy.Usage", query="query", - result_parser=ResultParserId.SIMPLE_RESULT_PARSER, + result_parser=ResultParserId.PROMETHEUS_PARSER, ), MetricValue( metric_id="my_metric", diff --git a/charts/app/values.yaml b/charts/app/values.yaml index cac8dca..573894d 100644 --- a/charts/app/values.yaml +++ b/charts/app/values.yaml @@ -37,11 +37,6 @@ settings: queries: node_queries: [] pod_queries: [] - influxdb: - org: org - timeout: 60000 - token: token - url: test k8s: in_cluster: true metadata: diff --git a/etc/config.yaml b/etc/config.yaml index b2182ff..5265ced 100644 --- a/etc/config.yaml +++ b/etc/config.yaml @@ -4,13 +4,16 @@ builder: is_single_slice: True single_slice_url: http://metadata.integration:80 queries: - node_queries: [] + node_queries: + - measurement_id: Energy + subresource: Energy + source: Kepler + unit: milliwatts + property: Energy.Usage + query: label_replace(kepler_node_platform_joules_total, "resource", "$1", "exported_instance", "(.+)") + result_parser: PROMETHEUS_PARSER + pod_queries: [] -influxdb: - org: org - timeout: 60000 - token: token - url: test k8s: in_cluster: false metadata: @@ -19,4 +22,4 @@ metadata: prometheus: endpoint_port: 8080 prometheus_client: - url: prometheus.integration + url: http://prometheus.integration From deb605cddc8a57fa3969db6f5f748e1797b11a80 Mon Sep 17 00:00:00 2001 From: ktatarnikov Date: Fri, 12 Jul 2024 13:44:44 +0200 Subject: [PATCH 61/61] HHT-669: using async prometheus metric server --- app/kgexporter_context.py | 12 +++--- poetry.lock | 78 ++++++++++++--------------------------- pyproject.toml | 3 +- 3 files changed, 31 insertions(+), 62 deletions(-) diff --git a/app/kgexporter_context.py b/app/kgexporter_context.py index 53d1173..9cda278 100644 --- a/app/kgexporter_context.py +++ b/app/kgexporter_context.py @@ -1,10 +1,9 @@ from typing import Any, List import asyncio -from wsgiref.simple_server import WSGIServer from loguru import logger -from prometheus_client import start_http_server +from prometheus_async.aio.web import MetricsHTTPServer, start_http_server from app.clients.k8s.k8s_client import K8SClient from app.clients.metadata_service.metadata_service_client import MetadataServiceClient @@ -25,7 +24,7 @@ class KGExporterContext: queue: AsyncQueue[DKGSlice] runner: asyncio.Runner terminated: asyncio.Event - prometheus_server: WSGIServer + prometheus_server: MetricsHTTPServer tasks: List[asyncio.Task[Any]] settings: KGExporterSettings @@ -60,16 +59,17 @@ def start(self) -> None: return self.terminated.clear() self.runner.run(self.run_tasks()) - server, _ = start_http_server(self.settings.prometheus.endpoint_port) - self.prometheus_server = server async def run_tasks(self) -> None: self.tasks.append(asyncio.create_task(self.builder.run())) self.tasks.append(asyncio.create_task(self.updater.run())) + self.prometheus_server = await start_http_server( + port=self.settings.prometheus.endpoint_port + ) def stop(self) -> None: self.terminated.set() - self.prometheus_server.shutdown() + self.runner.run(self.prometheus_server.close()) def 
wait_for_termination(self) -> None: self.runner.run(self.terminated.wait()) diff --git a/poetry.lock b/poetry.lock index 993c70d..03b9785 100644 --- a/poetry.lock +++ b/poetry.lock @@ -792,30 +792,6 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] -[[package]] -name = "influxdb-client" -version = "1.44.0" -description = "InfluxDB 2.0 Python client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "influxdb_client-1.44.0-py3-none-any.whl", hash = "sha256:e4c1ac9c9925c4693d63e988e22f65d2ddc1867f8910813b7f4721633175f2a0"}, - {file = "influxdb_client-1.44.0.tar.gz", hash = "sha256:da9bc0cc49de4a0ac844d833c1efa65227ec5a2254e63cdbe07b5d532c0c37f8"}, -] - -[package.dependencies] -certifi = ">=14.05.14" -python-dateutil = ">=2.5.3" -reactivex = ">=4.0.4" -setuptools = ">=21.0.0" -urllib3 = ">=1.26.0" - -[package.extras] -async = ["aiocsv (>=1.2.2)", "aiohttp (>=3.8.1)"] -ciso = ["ciso8601 (>=2.1.1)"] -extra = ["numpy", "pandas (>=1.0.0)"] -test = ["aioresponses (>=0.7.3)", "coverage (>=4.0.3)", "flake8 (>=5.0.3)", "httpretty (==1.0.5)", "jinja2 (>=3.1.4)", "nose (>=1.3.7)", "pluggy (>=0.3.1)", "psutil (>=5.6.3)", "py (>=1.4.31)", "pytest (>=5.0.0)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "randomize (>=0.13)", "sphinx (==1.8.5)", "sphinx-rtd-theme"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1595,6 +1571,29 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prometheus-async" +version = "22.2.0" +description = "Async helpers for prometheus_client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "prometheus-async-22.2.0.tar.gz", hash = "sha256:b0426370eb3b3bacd99afcf1fcc669c118cb67603cc951a6fe12434e9d4307f2"}, + {file = "prometheus_async-22.2.0-py3-none-any.whl", hash = "sha256:5cbfa535561342b834c087c4f3f3be0a3cb8785a0b8748111c916f3d68bbc370"}, +] + +[package.dependencies] +prometheus_client = ">=0.8.0" +wrapt = "*" + +[package.extras] +aiohttp = ["aiohttp (>=3)"] +consul = ["aiohttp (>=3)"] +dev = ["aiohttp", "cogapp", "coverage[toml]", "furo", "mypy", "myst-parser", "pre-commit", "pytest", "pytest-asyncio", "pytest-twisted", "sphinx", "sphinx-notfound-page", "sphinxcontrib-asyncio", "tomli", "twisted"] +docs = ["aiohttp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-asyncio", "twisted"] +tests = ["coverage[toml]", "pytest", "pytest-asyncio"] +twisted = ["twisted"] + [[package]] name = "prometheus-client" version = "0.20.0" @@ -1963,20 +1962,6 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] -[[package]] -name = "reactivex" -version = "4.0.4" -description = "ReactiveX (Rx) for Python" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "reactivex-4.0.4-py3-none-any.whl", hash = "sha256:0004796c420bd9e68aad8e65627d85a8e13f293de76656165dffbcb3a0e3fb6a"}, - {file = "reactivex-4.0.4.tar.gz", hash = "sha256:e912e6591022ab9176df8348a653fe8c8fa7a301f26f9931c9d8c78a650e04e8"}, -] - -[package.dependencies] -typing-extensions = ">=4.1.1,<5.0.0" - [[package]] name = "regex" version = "2024.5.15" @@ -2118,21 +2103,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "setuptools" -version = "70.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, - {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -2486,4 +2456,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11.4" -content-hash = "74388b3ae0459374c608e2e4b4ca682d7830b351609322453be4c51bbe38a93f" +content-hash = "b8c1eda636287d983998c4f63a88c16902b484aee0377705711ab6e52661352a" diff --git a/pyproject.toml b/pyproject.toml index 8d5d4b6..d699820 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,11 +20,10 @@ jsonpath-ng = "^1.6.1" httpx = "^0.27.0" pydantic-settings = "^2.2.1" pytest-vcr = "^1.0.2" -influxdb-client = "^1.43.0" -# aiocsv = "1.3.2" aiohttp = "3.9.3" loguru = "0.7.2" prometheus-client = "0.20.0" +prometheus-async = "22.2.0" aioprometheus-api-client = "0.0.2" kubernetes_asyncio = "30.1.1"