From e23c1a4ac9956ce3c0d64163009114ad2eac8830 Mon Sep 17 00:00:00 2001 From: ImMin5 Date: Sun, 17 Dec 2023 16:31:47 +0900 Subject: [PATCH 1/4] build: helm chart version --- deploy/helm/Chart.yaml | 2 +- deploy/helm/config/config.yaml | 9 --------- pkg/pip_requirements.txt | 1 - src/setup.py | 1 - 4 files changed, 1 insertion(+), 12 deletions(-) diff --git a/deploy/helm/Chart.yaml b/deploy/helm/Chart.yaml index 8982d338..ac29de91 100644 --- a/deploy/helm/Chart.yaml +++ b/deploy/helm/Chart.yaml @@ -4,6 +4,6 @@ description: SpaceONE Cost Analysis Helm chart for Kubernetes type: application -version: 1.3.15 +version: 1.3.164 appVersion: 1.x.y diff --git a/deploy/helm/config/config.yaml b/deploy/helm/config/config.yaml index 1ae25981..2aaea4a8 100644 --- a/deploy/helm/config/config.yaml +++ b/deploy/helm/config/config.yaml @@ -31,15 +31,6 @@ GLOBAL: file: type: file filename: /var/log/spaceone/cost_analysis.log - HANDLERS: - authentication: - - backend: spaceone.core.handler.authentication_handler.AuthenticationGRPCHandler - uri: grpc://identity:50051/v1/Domain/get_public_key - authorization: - - backend: spaceone.core.handler.authorization_handler.AuthorizationGRPCHandler - uri: grpc://identity:50051/v1/Authorization/verify - mutation: - - backend: spaceone.core.handler.mutation_handler.SpaceONEMutationHandler QUEUES: cost_analysis_q: backend: spaceone.core.queue.redis_queue.RedisQueue diff --git a/pkg/pip_requirements.txt b/pkg/pip_requirements.txt index 434f9267..b2bb405d 100644 --- a/pkg/pip_requirements.txt +++ b/pkg/pip_requirements.txt @@ -1,4 +1,3 @@ -spaceone-core spaceone-api boto3 pandas diff --git a/src/setup.py b/src/setup.py index 28748132..494ae123 100644 --- a/src/setup.py +++ b/src/setup.py @@ -27,7 +27,6 @@ license='Apache License 2.0', packages=find_packages(), install_requires=[ - 'spaceone-core', 'spaceone-api', 'boto3', 'pandas', From f98335aad123e1e2a3fb888c0c753caaaf20f719 Mon Sep 17 00:00:00 2001 From: ImMin5 Date: Mon, 18 Dec 2023 02:36:48 
+0900 Subject: [PATCH 2/4] refactor: modify all apis based on identity v2 --- src/spaceone/cost_analysis/error/budget.py | 39 +- .../cost_analysis/info/budget_info.py | 71 +- .../cost_analysis/info/budget_usage_info.py | 45 +- src/spaceone/cost_analysis/info/cost_info.py | 55 +- .../cost_analysis/info/data_source_info.py | 71 +- .../info/data_source_rule_info.py | 68 +- src/spaceone/cost_analysis/info/job_info.py | 48 +- .../cost_analysis/info/job_task_info.py | 44 +- .../cost_analysis/manager/__init__.py | 12 +- .../cost_analysis/manager/budget_manager.py | 32 +- .../manager/budget_usage_manager.py | 351 ++++++---- .../cost_analysis/manager/cost_manager.py | 295 +++++--- .../manager/cost_query_set_manager.py | 31 +- .../manager/data_source_manager.py | 29 +- .../manager/data_source_rule_manager.py | 226 +++--- .../cost_analysis/manager/identity_manager.py | 83 +-- .../cost_analysis/manager/job_manager.py | 149 ++-- .../cost_analysis/manager/job_task_manager.py | 120 ++-- .../cost_analysis/model/budget_model.py | 71 +- .../cost_analysis/model/budget_usage_model.py | 51 +- .../cost_analysis/model/cost_model.py | 109 +-- .../cost_analysis/model/data_source_model.py | 76 +- .../model/data_source_rule_model.py | 62 +- src/spaceone/cost_analysis/model/job_model.py | 55 +- .../cost_analysis/model/job_task_model.py | 57 +- .../cost_analysis/service/budget_service.py | 298 +++++--- .../service/budget_usage_service.py | 102 +-- .../service/cost_query_set_service.py | 117 ++-- .../cost_analysis/service/cost_service.py | 181 +++-- .../service/data_source_rule_service.py | 367 ++++++---- .../service/data_source_service.py | 467 ++++++++----- .../cost_analysis/service/job_service.py | 653 +++++++++++------- .../cost_analysis/service/job_task_service.py | 57 +- 33 files changed, 2687 insertions(+), 1805 deletions(-) diff --git a/src/spaceone/cost_analysis/error/budget.py b/src/spaceone/cost_analysis/error/budget.py index 30366d66..fd3a13c8 100644 --- 
a/src/spaceone/cost_analysis/error/budget.py +++ b/src/spaceone/cost_analysis/error/budget.py @@ -2,51 +2,62 @@ class ERROR_ONLY_ONF_OF_PROJECT_OR_PROJECT_GROUP(ERROR_INVALID_ARGUMENT): - _message = 'Only one of project_id or project_group_id is allowed.' + _message = "Only one of project_id or project_group_id is allowed." class ERROR_INVALID_TIME_RANGE(ERROR_INVALID_ARGUMENT): - _message = 'Budget end time must be greater than start time. (start = {start}, end = {end})' + _message = "Budget end time must be greater than start time. (start = {start}, end = {end})" class ERROR_NO_DATE_IN_PLANNED_LIMITS(ERROR_INVALID_ARGUMENT): - _message = 'No date in the planned limits. (date = {date})' + _message = "No date in the planned limits. (date = {date})" class ERROR_DATE_IS_REQUIRED(ERROR_INVALID_ARGUMENT): - _message = 'Date is required for planned limits. (key = planned_limits, value = {value})' + _message = ( + "Date is required for planned limits. (key = planned_limits, value = {value})" + ) class ERROR_LIMIT_IS_WRONG(ERROR_INVALID_ARGUMENT): - _message = 'Limit must be greater than zero. (key = planned_limits, value = {value})' + _message = ( + "Limit must be greater than zero. (key = planned_limits, value = {value})" + ) class ERROR_DATE_IS_WRONG(ERROR_INVALID_ARGUMENT): - _message = 'Date is wrong in the planned limits. (wrong date = {date})' + _message = "Date is wrong in the planned limits. 
(wrong date = {date})" class ERROR_UNIT_IS_REQUIRED(ERROR_INVALID_ARGUMENT): - _message = 'Unit is required for notifications (key = notifications, value = {value})' + _message = ( + "Unit is required for notifications (key = notifications, value = {value})" + ) class ERROR_NOTIFICATION_TYPE_IS_REQUIRED(ERROR_INVALID_ARGUMENT): - _message = 'Notification type is required for notifications (key = notifications, value = {value})' + _message = "Notification type is required for notifications (key = notifications, value = {value})" class ERROR_THRESHOLD_IS_WRONG(ERROR_INVALID_ARGUMENT): - _message = 'Threshold must be greater than zero. (key = notifications, value = {value})' + _message = ( + "Threshold must be greater than zero. (key = notifications, value = {value})" + ) class ERROR_THRESHOLD_IS_WRONG_IN_PERCENT_TYPE(ERROR_INVALID_ARGUMENT): - _message = 'In percentage type, the threshold must be less than 100. (key = notifications, value = {value})' + _message = "In percentage type, the threshold must be less than 100. (key = notifications, value = {value})" + class ERROR_PROVIDER_FILTER_IS_EMPTY(ERROR_INVALID_ARGUMENT): - _message = 'Provider filter is empty. (key = provider_filter.providers, value = [])' + _message = "Provider filter is empty. (key = provider_filter.providers, value = [])" class ERROR_BUDGET_ALREADY_EXIST(ERROR_INVALID_ARGUMENT): - _message = 'Budget already exist. (data_source_id = {data_source_id}, target = {target})' + _message = ( + "Budget already exist. (data_source_id = {data_source_id}, target = {target})" + ) -class ERROR_NOTIFICATION_IS_NOT_SUPPORTED_IN_PROJECT_GROUP(ERROR_INVALID_ARGUMENT): - _message = 'Notification is not supported in project group. (target = {target})' +class ERROR_NOTIFICATION_IS_NOT_SUPPORTED_IN_PROJECT(ERROR_INVALID_ARGUMENT): + _message = "Notification is not supported in project. 
(target = {target})" diff --git a/src/spaceone/cost_analysis/info/budget_info.py b/src/spaceone/cost_analysis/info/budget_info.py index ed5d6dd9..b96775f2 100644 --- a/src/spaceone/cost_analysis/info/budget_info.py +++ b/src/spaceone/cost_analysis/info/budget_info.py @@ -3,9 +3,14 @@ from spaceone.api.cost_analysis.v1 import budget_pb2 from spaceone.core.pygrpc.message_type import * from spaceone.core import utils -from spaceone.cost_analysis.model.budget_model import Budget, PlannedLimit, Notification, ProviderFilter +from spaceone.cost_analysis.model.budget_model import ( + Budget, + PlannedLimit, + Notification, + ProviderFilter, +) -__all__ = ['BudgetInfo', 'BudgetsInfo'] +__all__ = ["BudgetInfo", "BudgetsInfo"] def PlannedLimitsInfo(planned_limit_vos: List[PlannedLimit]): @@ -15,10 +20,7 @@ def PlannedLimitsInfo(planned_limit_vos: List[PlannedLimit]): planned_limits_info = [] for vo in planned_limit_vos: - info = { - 'date': vo.date, - 'limit': vo.limit - } + info = {"date": vo.date, "limit": vo.limit} planned_limits_info.append(budget_pb2.PlannedLimit(**info)) @@ -33,9 +35,9 @@ def BudgetNotificationsInfo(notification_vos: List[Notification]): for vo in notification_vos: info = { - 'threshold': vo.threshold, - 'unit': vo.unit, - 'notification_type': vo.notification_type + "threshold": vo.threshold, + "unit": vo.unit, + "notification_type": vo.notification_type, } notifications_info.append(budget_pb2.BudgetNotification(**info)) @@ -48,8 +50,8 @@ def ProviderFilterInfo(provider_filter_vo: ProviderFilter): return None info = { - 'state': provider_filter_vo.state, - 'providers': list(provider_filter_vo.providers) + "state": provider_filter_vo.state, + "providers": list(provider_filter_vo.providers), } return budget_pb2.ProviderFilter(**info) @@ -57,32 +59,37 @@ def ProviderFilterInfo(provider_filter_vo: ProviderFilter): def BudgetInfo(budget_vo: Budget, minimal=False): info = { - 'budget_id': budget_vo.budget_id, - 'name': budget_vo.name, - 'limit': 
budget_vo.limit, - 'currency': budget_vo.currency, - 'provider_filter': ProviderFilterInfo(budget_vo.provider_filter), - 'project_id': budget_vo.project_id, - 'project_group_id': budget_vo.project_group_id, - 'data_source_id': budget_vo.data_source_id, + "budget_id": budget_vo.budget_id, + "name": budget_vo.name, + "limit": budget_vo.limit, + "currency": budget_vo.currency, + "provider_filter": ProviderFilterInfo(budget_vo.provider_filter), + "resource_group": budget_vo.resource_group, + "project_id": budget_vo.project_id, + "project_group_id": budget_vo.project_group_id, + "data_source_id": budget_vo.data_source_id, } if not minimal: - info.update({ - 'planned_limits': PlannedLimitsInfo(budget_vo.planned_limits), - 'time_unit': budget_vo.time_unit, - 'start': budget_vo.start, - 'end': budget_vo.end, - 'notifications': BudgetNotificationsInfo(budget_vo.notifications), - 'tags': change_struct_type(budget_vo.tags), - 'domain_id': budget_vo.domain_id, - 'created_at': utils.datetime_to_iso8601(budget_vo.created_at), - 'updated_at': utils.datetime_to_iso8601(budget_vo.updated_at) - }) + info.update( + { + "planned_limits": PlannedLimitsInfo(budget_vo.planned_limits), + "time_unit": budget_vo.time_unit, + "start": budget_vo.start, + "end": budget_vo.end, + "notifications": BudgetNotificationsInfo(budget_vo.notifications), + "tags": change_struct_type(budget_vo.tags), + "domain_id": budget_vo.domain_id, + "created_at": utils.datetime_to_iso8601(budget_vo.created_at), + "updated_at": utils.datetime_to_iso8601(budget_vo.updated_at), + } + ) return budget_pb2.BudgetInfo(**info) def BudgetsInfo(budget_vos, total_count, **kwargs): - return budget_pb2.BudgetsInfo(results=list( - map(functools.partial(BudgetInfo, **kwargs), budget_vos)), total_count=total_count) + return budget_pb2.BudgetsInfo( + results=list(map(functools.partial(BudgetInfo, **kwargs), budget_vos)), + total_count=total_count, + ) diff --git a/src/spaceone/cost_analysis/info/budget_usage_info.py 
b/src/spaceone/cost_analysis/info/budget_usage_info.py index 01a40dce..a16d638e 100644 --- a/src/spaceone/cost_analysis/info/budget_usage_info.py +++ b/src/spaceone/cost_analysis/info/budget_usage_info.py @@ -4,7 +4,7 @@ from spaceone.core import utils from spaceone.cost_analysis.model.budget_usage_model import BudgetUsage, ProviderFilter -__all__ = ['BudgetUsageInfo', 'BudgetUsagesInfo'] +__all__ = ["BudgetUsageInfo", "BudgetUsagesInfo"] def ProviderFilterInfo(provider_filter_vo: ProviderFilter): @@ -12,8 +12,8 @@ def ProviderFilterInfo(provider_filter_vo: ProviderFilter): return None info = { - 'state': provider_filter_vo.state, - 'providers': list(provider_filter_vo.providers) + "state": provider_filter_vo.state, + "providers": list(provider_filter_vo.providers), } return budget_usage_pb2.BudgetUsageProviderFilter(**info) @@ -21,27 +21,34 @@ def ProviderFilterInfo(provider_filter_vo: ProviderFilter): def BudgetUsageInfo(budget_usage_vo: BudgetUsage, minimal=False): info = { - 'budget_id': budget_usage_vo.budget_id, - 'name': budget_usage_vo.name, - 'date': budget_usage_vo.date, - 'cost': budget_usage_vo.cost, - 'limit': budget_usage_vo.limit, - 'currency': budget_usage_vo.currency, - 'provider_filter': ProviderFilterInfo(budget_usage_vo.provider_filter), - 'project_id': budget_usage_vo.project_id, - 'project_group_id': budget_usage_vo.project_group_id, - 'data_source_id': budget_usage_vo.data_source_id, + "budget_id": budget_usage_vo.budget_id, + "name": budget_usage_vo.name, + "date": budget_usage_vo.date, + "cost": budget_usage_vo.cost, + "limit": budget_usage_vo.limit, + "currency": budget_usage_vo.currency, + "provider_filter": ProviderFilterInfo(budget_usage_vo.provider_filter), + "resource_group": budget_usage_vo.resource_group, + "project_id": budget_usage_vo.project_id, + "data_source_id": budget_usage_vo.data_source_id, + "workspace_id": budget_usage_vo.workspace_id, } if not minimal: - info.update({ - 'domain_id': budget_usage_vo.domain_id, - 
'updated_at': utils.datetime_to_iso8601(budget_usage_vo.updated_at) - }) + info.update( + { + "domain_id": budget_usage_vo.domain_id, + "updated_at": utils.datetime_to_iso8601(budget_usage_vo.updated_at), + } + ) return budget_usage_pb2.BudgetUsageInfo(**info) def BudgetUsagesInfo(budget_usage_vos, total_count, **kwargs): - return budget_usage_pb2.BudgetUsagesInfo(results=list( - map(functools.partial(BudgetUsageInfo, **kwargs), budget_usage_vos)), total_count=total_count) + return budget_usage_pb2.BudgetUsagesInfo( + results=list( + map(functools.partial(BudgetUsageInfo, **kwargs), budget_usage_vos) + ), + total_count=total_count, + ) diff --git a/src/spaceone/cost_analysis/info/cost_info.py b/src/spaceone/cost_analysis/info/cost_info.py index 099c2838..1b2da77e 100644 --- a/src/spaceone/cost_analysis/info/cost_info.py +++ b/src/spaceone/cost_analysis/info/cost_info.py @@ -4,40 +4,45 @@ from spaceone.core import utils from spaceone.cost_analysis.model.cost_model import Cost -__all__ = ['CostInfo', 'CostsInfo'] +__all__ = ["CostInfo", "CostsInfo"] def CostInfo(cost_vo: Cost, minimal=False): info = { - 'cost_id': cost_vo.cost_id, - 'cost': cost_vo.cost, - 'provider': cost_vo.provider, - 'region_code': cost_vo.region_code, - 'product': cost_vo.product, - 'usage_type': cost_vo.usage_type, - 'resource': cost_vo.resource, - 'data_source_id': cost_vo.data_source_id, - 'billed_date': cost_vo.billed_date + "cost_id": cost_vo.cost_id, + "cost": cost_vo.cost, + "provider": cost_vo.provider, + "region_code": cost_vo.region_code, + "product": cost_vo.product, + "usage_type": cost_vo.usage_type, + "resource": cost_vo.resource, + "data_source_id": cost_vo.data_source_id, + "billed_date": cost_vo.billed_date, } if not minimal: - info.update({ - 'usage_quantity': cost_vo.usage_quantity, - 'usage_unit': cost_vo.usage_unit, - 'tags': change_struct_type(cost_vo.tags), - 'additional_info': change_struct_type(cost_vo.additional_info), - 'service_account_id': cost_vo.service_account_id, 
- 'project_id': cost_vo.project_id, - 'project_group_id': cost_vo.project_group_id, - 'data_source_id': cost_vo.data_source_id, - 'domain_id': cost_vo.domain_id, - 'billed_year': cost_vo.billed_year, - 'billed_month': cost_vo.billed_month, - }) + info.update( + { + "usage_quantity": cost_vo.usage_quantity, + "usage_unit": cost_vo.usage_unit, + "tags": change_struct_type(cost_vo.tags), + "additional_info": change_struct_type(cost_vo.additional_info), + "service_account_id": cost_vo.service_account_id, + "project_id": cost_vo.project_id, + "project_group_id": cost_vo.project_group_id, + "data_source_id": cost_vo.data_source_id, + "workspace_id": cost_vo.workspace_id, + "domain_id": cost_vo.domain_id, + "billed_year": cost_vo.billed_year, + "billed_month": cost_vo.billed_month, + } + ) return cost_pb2.CostInfo(**info) def CostsInfo(cost_vos, total_count, **kwargs): - return cost_pb2.CostsInfo(results=list( - map(functools.partial(CostInfo, **kwargs), cost_vos)), total_count=total_count) + return cost_pb2.CostsInfo( + results=list(map(functools.partial(CostInfo, **kwargs), cost_vos)), + total_count=total_count, + ) diff --git a/src/spaceone/cost_analysis/info/data_source_info.py b/src/spaceone/cost_analysis/info/data_source_info.py index fb00aade..cb10ad0d 100644 --- a/src/spaceone/cost_analysis/info/data_source_info.py +++ b/src/spaceone/cost_analysis/info/data_source_info.py @@ -4,16 +4,16 @@ from spaceone.core import utils from spaceone.cost_analysis.model.data_source_model import DataSource -__all__ = ['DataSourceInfo', 'DataSourcesInfo'] +__all__ = ["DataSourceInfo", "DataSourcesInfo"] def SecretFilterInfo(secret_filter_vo): if secret_filter_vo: info = { - 'state': secret_filter_vo.state, - 'secrets': secret_filter_vo.secrets, - 'service_accounts': secret_filter_vo.service_accounts, - 'schemas': secret_filter_vo.schemas + "state": secret_filter_vo.state, + "secrets": secret_filter_vo.secrets, + "service_accounts": secret_filter_vo.service_accounts, + "schemas": 
secret_filter_vo.schemas, } return data_source_pb2.SecretFilter(**info) else: @@ -23,13 +23,13 @@ def SecretFilterInfo(secret_filter_vo): def PluginInfo(vo): if vo: info = { - 'plugin_id': vo.plugin_id, - 'version': vo.version, - 'options': change_struct_type(vo.options), - 'metadata': change_struct_type(vo.metadata), - 'secret_id': vo.secret_id, - 'schema': vo.schema, - 'upgrade_mode': vo.upgrade_mode + "plugin_id": vo.plugin_id, + "version": vo.version, + "options": change_struct_type(vo.options), + "metadata": change_struct_type(vo.metadata), + "secret_id": vo.secret_id, + "schema": vo.schema, + "upgrade_mode": vo.upgrade_mode, } return data_source_pb2.PluginInfo(**info) @@ -39,31 +39,38 @@ def PluginInfo(vo): def DataSourceInfo(data_source_vo: DataSource, minimal=False): info = { - 'data_source_id': data_source_vo.data_source_id, - 'name': data_source_vo.name, - 'state': data_source_vo.state, - 'data_source_type': data_source_vo.data_source_type, - 'secret_type': data_source_vo.secret_type, - 'provider': data_source_vo.provider + "data_source_id": data_source_vo.data_source_id, + "name": data_source_vo.name, + "state": data_source_vo.state, + "data_source_type": data_source_vo.data_source_type, + "secret_type": data_source_vo.secret_type, + "provider": data_source_vo.provider, + "resource_group": data_source_vo.resource_group, } if not minimal: - info.update({ - 'plugin_info': PluginInfo(data_source_vo.plugin_info), - 'secret_filter': SecretFilterInfo(data_source_vo.secret_filter), - 'template': change_struct_type(data_source_vo.template), - 'tags': change_struct_type(data_source_vo.tags), - 'cost_tag_keys': data_source_vo.cost_tag_keys, - 'cost_additional_info_keys': data_source_vo.cost_additional_info_keys, - 'cost_data_keys': data_source_vo.cost_data_keys, - 'domain_id': data_source_vo.domain_id, - 'created_at': utils.datetime_to_iso8601(data_source_vo.created_at), - 'last_synchronized_at': utils.datetime_to_iso8601(data_source_vo.last_synchronized_at) - }) 
+ info.update( + { + "plugin_info": PluginInfo(data_source_vo.plugin_info), + "secret_filter": SecretFilterInfo(data_source_vo.secret_filter), + "template": change_struct_type(data_source_vo.template), + "tags": change_struct_type(data_source_vo.tags), + "cost_tag_keys": data_source_vo.cost_tag_keys, + "cost_additional_info_keys": data_source_vo.cost_additional_info_keys, + "cost_data_keys": data_source_vo.cost_data_keys, + "domain_id": data_source_vo.domain_id, + "created_at": utils.datetime_to_iso8601(data_source_vo.created_at), + "last_synchronized_at": utils.datetime_to_iso8601( + data_source_vo.last_synchronized_at + ), + } + ) return data_source_pb2.DataSourceInfo(**info) def DataSourcesInfo(data_source_vos, total_count, **kwargs): - return data_source_pb2.DataSourcesInfo(results=list( - map(functools.partial(DataSourceInfo, **kwargs), data_source_vos)), total_count=total_count) + return data_source_pb2.DataSourcesInfo( + results=list(map(functools.partial(DataSourceInfo, **kwargs), data_source_vos)), + total_count=total_count, + ) diff --git a/src/spaceone/cost_analysis/info/data_source_rule_info.py b/src/spaceone/cost_analysis/info/data_source_rule_info.py index 9adfc8f4..7f410171 100644 --- a/src/spaceone/cost_analysis/info/data_source_rule_info.py +++ b/src/spaceone/cost_analysis/info/data_source_rule_info.py @@ -4,9 +4,13 @@ from spaceone.core.pygrpc.message_type import * from spaceone.core import utils -from spaceone.cost_analysis.model.data_source_rule_model import DataSourceRule, DataSourceRuleCondition, DataSourceRuleOptions +from spaceone.cost_analysis.model.data_source_rule_model import ( + DataSourceRule, + DataSourceRuleCondition, + DataSourceRuleOptions, +) -__all__ = ['DataSourceRuleInfo', 'DataSourceRulesInfo'] +__all__ = ["DataSourceRuleInfo", "DataSourceRulesInfo"] def DataSourceRuleConditionsInfo(condition_vos: List[DataSourceRuleCondition]): @@ -16,11 +20,7 @@ def DataSourceRuleConditionsInfo(condition_vos: List[DataSourceRuleCondition]): 
conditions_info = [] for vo in condition_vos: - info = { - 'key': vo.key, - 'value': vo.value, - 'operator': vo.operator - } + info = {"key": vo.key, "value": vo.value, "operator": vo.operator} conditions_info.append(data_source_rule_pb2.DataSourceRuleCondition(**info)) @@ -32,8 +32,8 @@ def DataSourceRuleActionMatchRuleInfo(match_rule_data): return None info = { - 'source': match_rule_data.get('source'), - 'target': match_rule_data.get('target') + "source": match_rule_data.get("source"), + "target": match_rule_data.get("target"), } return data_source_rule_pb2.MatchRule(**info) @@ -46,9 +46,9 @@ def DataSourceRuleActionsInfo(actions_data): info = {} for key, value in actions_data.items(): - if key in ['match_project', 'match_service_account']: + if key in ["match_project", "match_service_account"]: info[key] = DataSourceRuleActionMatchRuleInfo(value) - elif key == 'add_additional_info': + elif key == "add_additional_info": info[key] = change_struct_type(value) else: info[key] = value @@ -60,36 +60,44 @@ def DataSourceRuleOptionsInfo(vo: DataSourceRuleOptions): if vo is None: return None else: - info = { - 'stop_processing': vo.stop_processing - } + info = {"stop_processing": vo.stop_processing} return data_source_rule_pb2.DataSourceRuleOptions(**info) def DataSourceRuleInfo(data_source_rule_vo: DataSourceRule, minimal=False): info = { - 'data_source_rule_id': data_source_rule_vo.data_source_rule_id, - 'name': data_source_rule_vo.name, - 'order': data_source_rule_vo.order, - 'rule_type': data_source_rule_vo.rule_type, - 'data_source_id': data_source_rule_vo.data_source_id, + "data_source_rule_id": data_source_rule_vo.data_source_rule_id, + "name": data_source_rule_vo.name, + "order": data_source_rule_vo.order, + "rule_type": data_source_rule_vo.rule_type, + "resource_group": data_source_rule_vo.resource_group, + "data_source_id": data_source_rule_vo.data_source_id, } if not minimal: - info.update({ - 'conditions': 
DataSourceRuleConditionsInfo(data_source_rule_vo.conditions), - 'conditions_policy': data_source_rule_vo.conditions_policy, - 'actions': DataSourceRuleActionsInfo(data_source_rule_vo.actions), - 'options': DataSourceRuleOptionsInfo(data_source_rule_vo.options), - 'tags': change_struct_type(data_source_rule_vo.tags), - 'domain_id': data_source_rule_vo.domain_id, - 'created_at': utils.datetime_to_iso8601(data_source_rule_vo.created_at) - }) + info.update( + { + "conditions": DataSourceRuleConditionsInfo( + data_source_rule_vo.conditions + ), + "conditions_policy": data_source_rule_vo.conditions_policy, + "actions": DataSourceRuleActionsInfo(data_source_rule_vo.actions), + "options": DataSourceRuleOptionsInfo(data_source_rule_vo.options), + "tags": change_struct_type(data_source_rule_vo.tags), + "workspace_id": data_source_rule_vo.workspace_id, + "domain_id": data_source_rule_vo.domain_id, + "created_at": utils.datetime_to_iso8601(data_source_rule_vo.created_at), + } + ) return data_source_rule_pb2.DataSourceRuleInfo(**info) def DataSourceRulesInfo(data_source_rule_vos, total_count, **kwargs): - return data_source_rule_pb2.DataSourceRulesInfo(results=list( - map(functools.partial(DataSourceRuleInfo, **kwargs), data_source_rule_vos)), total_count=total_count) + return data_source_rule_pb2.DataSourceRulesInfo( + results=list( + map(functools.partial(DataSourceRuleInfo, **kwargs), data_source_rule_vos) + ), + total_count=total_count, + ) diff --git a/src/spaceone/cost_analysis/info/job_info.py b/src/spaceone/cost_analysis/info/job_info.py index 283a8e2f..c3518a75 100644 --- a/src/spaceone/cost_analysis/info/job_info.py +++ b/src/spaceone/cost_analysis/info/job_info.py @@ -5,7 +5,7 @@ from spaceone.core import utils from spaceone.cost_analysis.model.job_model import Job, Changed -__all__ = ['JobInfo', 'JobsInfo'] +__all__ = ["JobInfo", "JobsInfo"] def ChangedInfo(changed_vos: List[Changed]): @@ -16,9 +16,9 @@ def ChangedInfo(changed_vos: List[Changed]): for vo in 
changed_vos: info = { - 'start': vo.start, - 'end': vo.end, - 'filter': change_struct_type(vo.filter) + "start": vo.start, + "end": vo.end, + "filter": change_struct_type(vo.filter), } changed_info.append(job_pb2.ChangedInfo(**info)) @@ -28,28 +28,34 @@ def ChangedInfo(changed_vos: List[Changed]): def JobInfo(job_vo: Job, minimal=False): info = { - 'job_id': job_vo.job_id, - 'status': job_vo.status, - 'total_tasks': job_vo.total_tasks, - 'remained_tasks': job_vo.remained_tasks, - 'data_source_id': job_vo.data_source_id + "job_id": job_vo.job_id, + "status": job_vo.status, + "total_tasks": job_vo.total_tasks, + "remained_tasks": job_vo.remained_tasks, + "data_source_id": job_vo.data_source_id, + "workspace_id": job_vo.workspace_id, } if not minimal: - info.update({ - 'options': change_struct_type(job_vo.options), - 'error_code': job_vo.error_code, - 'error_message': job_vo.error_message, - 'domain_id': job_vo.domain_id, - 'changed': ChangedInfo(job_vo.changed), - 'created_at': utils.datetime_to_iso8601(job_vo.created_at), - 'updated_at': utils.datetime_to_iso8601(job_vo.updated_at), - 'finished_at': utils.datetime_to_iso8601(job_vo.finished_at) - }) + info.update( + { + "options": change_struct_type(job_vo.options), + "error_code": job_vo.error_code, + "error_message": job_vo.error_message, + "resource_group": job_vo.resource_group, + "domain_id": job_vo.domain_id, + "changed": ChangedInfo(job_vo.changed), + "created_at": utils.datetime_to_iso8601(job_vo.created_at), + "updated_at": utils.datetime_to_iso8601(job_vo.updated_at), + "finished_at": utils.datetime_to_iso8601(job_vo.finished_at), + } + ) return job_pb2.JobInfo(**info) def JobsInfo(job_vos, total_count, **kwargs): - return job_pb2.JobsInfo(results=list( - map(functools.partial(JobInfo, **kwargs), job_vos)), total_count=total_count) + return job_pb2.JobsInfo( + results=list(map(functools.partial(JobInfo, **kwargs), job_vos)), + total_count=total_count, + ) diff --git 
a/src/spaceone/cost_analysis/info/job_task_info.py b/src/spaceone/cost_analysis/info/job_task_info.py index 86da716a..bfd7382f 100644 --- a/src/spaceone/cost_analysis/info/job_task_info.py +++ b/src/spaceone/cost_analysis/info/job_task_info.py @@ -4,34 +4,40 @@ from spaceone.core import utils from spaceone.cost_analysis.model.job_task_model import JobTask -__all__ = ['JobTaskInfo', 'JobTasksInfo'] +__all__ = ["JobTaskInfo", "JobTasksInfo"] def JobTaskInfo(job_task_vo: JobTask, minimal=False): info = { - 'job_task_id': job_task_vo.job_task_id, - 'status': job_task_vo.status, - 'created_count': job_task_vo.created_count, - 'job_id': job_task_vo.job_id, - 'data_source_id': job_task_vo.data_source_id + "job_task_id": job_task_vo.job_task_id, + "status": job_task_vo.status, + "created_count": job_task_vo.created_count, + "job_id": job_task_vo.job_id, + "data_source_id": job_task_vo.data_source_id, } if not minimal: - info.update({ - 'options': change_struct_type(job_task_vo.options), - 'error_code': job_task_vo.error_code, - 'error_message': job_task_vo.error_message, - 'job_id': job_task_vo.job_id, - 'domain_id': job_task_vo.domain_id, - 'created_at': utils.datetime_to_iso8601(job_task_vo.created_at), - 'started_at': utils.datetime_to_iso8601(job_task_vo.started_at), - 'updated_at': utils.datetime_to_iso8601(job_task_vo.updated_at), - 'finished_at': utils.datetime_to_iso8601(job_task_vo.finished_at) - }) + info.update( + { + "options": change_struct_type(job_task_vo.options), + "error_code": job_task_vo.error_code, + "error_message": job_task_vo.error_message, + "resource_group": job_task_vo.resource_group, + "job_id": job_task_vo.job_id, + "workspace_id": job_task_vo.workspace_id, + "domain_id": job_task_vo.domain_id, + "created_at": utils.datetime_to_iso8601(job_task_vo.created_at), + "started_at": utils.datetime_to_iso8601(job_task_vo.started_at), + "updated_at": utils.datetime_to_iso8601(job_task_vo.updated_at), + "finished_at": 
utils.datetime_to_iso8601(job_task_vo.finished_at), + } + ) return job_task_pb2.JobTaskInfo(**info) def JobTasksInfo(job_task_vos, total_count, **kwargs): - return job_task_pb2.JobTasksInfo(results=list( - map(functools.partial(JobTaskInfo, **kwargs), job_task_vos)), total_count=total_count) + return job_task_pb2.JobTasksInfo( + results=list(map(functools.partial(JobTaskInfo, **kwargs), job_task_vos)), + total_count=total_count, + ) diff --git a/src/spaceone/cost_analysis/manager/__init__.py b/src/spaceone/cost_analysis/manager/__init__.py index 6fe3111f..48209197 100644 --- a/src/spaceone/cost_analysis/manager/__init__.py +++ b/src/spaceone/cost_analysis/manager/__init__.py @@ -1,10 +1,16 @@ from spaceone.cost_analysis.manager.data_source_manager import DataSourceManager -from spaceone.cost_analysis.manager.data_source_rule_manager import DataSourceRuleManager -from spaceone.cost_analysis.manager.data_source_plugin_manager import DataSourcePluginManager +from spaceone.cost_analysis.manager.data_source_rule_manager import ( + DataSourceRuleManager, +) +from spaceone.cost_analysis.manager.data_source_plugin_manager import ( + DataSourcePluginManager, +) from spaceone.cost_analysis.manager.plugin_manager import PluginManager from spaceone.cost_analysis.manager.repository_manager import RepositoryManager from spaceone.cost_analysis.manager.cost_manager import CostManager -from spaceone.cost_analysis.manager.data_source_rule_manager import DataSourceRuleManager +from spaceone.cost_analysis.manager.data_source_rule_manager import ( + DataSourceRuleManager, +) from spaceone.cost_analysis.manager.budget_manager import BudgetManager from spaceone.cost_analysis.manager.budget_usage_manager import BudgetUsageManager from spaceone.cost_analysis.manager.cost_query_set_manager import CostQuerySetManager diff --git a/src/spaceone/cost_analysis/manager/budget_manager.py b/src/spaceone/cost_analysis/manager/budget_manager.py index 520794ee..b495f8d1 100644 --- 
a/src/spaceone/cost_analysis/manager/budget_manager.py +++ b/src/spaceone/cost_analysis/manager/budget_manager.py @@ -7,16 +7,17 @@ class BudgetManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.budget_model: Budget = self.locator.get_model('Budget') + self.budget_model: Budget = self.locator.get_model("Budget") def create_budget(self, params): def _rollback(budget_vo): - _LOGGER.info(f'[create_budget._rollback] ' - f'Delete budget : {budget_vo.name} ' - f'({budget_vo.budget_id})') + _LOGGER.info( + f"[create_budget._rollback] " + f"Delete budget : {budget_vo.name} " + f"({budget_vo.budget_id})" + ) budget_vo.delete() budget_vo: Budget = self.budget_model.create(params) @@ -25,24 +26,31 @@ def _rollback(budget_vo): return budget_vo def update_budget(self, params): - budget_vo: Budget = self.get_budget(params['budget_id'], params['domain_id']) + budget_vo: Budget = self.get_budget(params["budget_id"], params["domain_id"]) return self.update_budget_by_vo(params, budget_vo) def update_budget_by_vo(self, params, budget_vo): def _rollback(old_data): - _LOGGER.info(f'[update_budget_by_vo._rollback] Revert Data : ' - f'{old_data["budget_id"]}') + _LOGGER.info( + f"[update_budget_by_vo._rollback] Revert Data : " + f'{old_data["budget_id"]}' + ) budget_vo.update(old_data) self.transaction.add_rollback(_rollback, budget_vo.to_dict()) return budget_vo.update(params) - def delete_budget(self, budget_id, domain_id): - budget_vo: Budget = self.get_budget(budget_id, domain_id) + def delete_budget(self, budget_id, workspace_id, domain_id): + budget_vo: Budget = self.get_budget(budget_id, domain_id, workspace_id) budget_vo.delete() - def get_budget(self, budget_id, domain_id, only=None): - return self.budget_model.get(budget_id=budget_id, domain_id=domain_id, only=only) + def get_budget(self, budget_id: str, domain_id: str, workspace_id: str = None): + conditions = {"budget_id": budget_id, "domain_id": domain_id} + + if 
workspace_id: + conditions["workspace_id"] = workspace_id + + return self.budget_model.get(**conditions) def filter_budgets(self, **conditions): return self.budget_model.filter(**conditions) diff --git a/src/spaceone/cost_analysis/manager/budget_usage_manager.py b/src/spaceone/cost_analysis/manager/budget_usage_manager.py index 51964a96..42b3f5d2 100644 --- a/src/spaceone/cost_analysis/manager/budget_usage_manager.py +++ b/src/spaceone/cost_analysis/manager/budget_usage_manager.py @@ -16,78 +16,85 @@ class BudgetUsageManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.budget_mgr: BudgetManager = self.locator.get_manager('BudgetManager') - self.budget_usage_model: BudgetUsage = self.locator.get_model('BudgetUsage') - self.identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager') - self.notification_mgr: NotificationManager = self.locator.get_manager('NotificationManager') - self.data_source_mgr: DataSourceManager = self.locator.get_manager('DataSourceManager') + self.budget_mgr: BudgetManager = self.locator.get_manager("BudgetManager") + self.budget_usage_model: BudgetUsage = self.locator.get_model("BudgetUsage") + self.identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") + self.notification_mgr: NotificationManager = self.locator.get_manager( + "NotificationManager" + ) + self.data_source_mgr: DataSourceManager = self.locator.get_manager( + "DataSourceManager" + ) def create_budget_usages(self, budget_vo: Budget): - if budget_vo.time_unit == 'TOTAL': - start_dt = datetime.strptime(budget_vo.start, '%Y-%m') - end_dt = datetime.strptime(budget_vo.end, '%Y-%m') + if budget_vo.time_unit == "TOTAL": + start_dt = datetime.strptime(budget_vo.start, "%Y-%m") + end_dt = datetime.strptime(budget_vo.end, "%Y-%m") dts = [dt for dt in rrule(MONTHLY, dtstart=start_dt, until=end_dt)] limit_per_month = round(budget_vo.limit / len(dts), 3) for dt in dts: budget_usage_data = { - 'budget_id': 
budget_vo.budget_id, - 'name': budget_vo.name, - 'date': dt.strftime("%Y-%m"), - 'cost': 0, - 'limit': limit_per_month, - 'currency': budget_vo.currency, - 'provider_filter': budget_vo.provider_filter.to_dict(), - 'budget': budget_vo, - 'project_id': budget_vo.project_id, - 'project_group_id': budget_vo.project_group_id, - 'data_source_id': budget_vo.data_source_id, - 'domain_id': budget_vo.domain_id + "budget_id": budget_vo.budget_id, + "name": budget_vo.name, + "date": dt.strftime("%Y-%m"), + "cost": 0, + "limit": limit_per_month, + "currency": budget_vo.currency, + "provider_filter": budget_vo.provider_filter.to_dict(), + "budget": budget_vo, + "project_id": budget_vo.project_id, + "project_group_id": budget_vo.project_group_id, + "data_source_id": budget_vo.data_source_id, + "domain_id": budget_vo.domain_id, } - self.budget_usage_model.create(budget_usage_data) + budget_usage_vo = self.budget_usage_model.create(budget_usage_data) else: for planned_limit in budget_vo.planned_limits: budget_usage_data = { - 'budget_id': budget_vo.budget_id, - 'name': budget_vo.name, - 'date': planned_limit['date'], - 'cost': 0, - 'limit': planned_limit.limit, - 'currency': budget_vo.currency, - 'provider_filter': budget_vo.provider_filter.to_dict(), - 'budget': budget_vo, - 'project_id': budget_vo.project_id, - 'project_group_id': budget_vo.project_group_id, - 'data_source_id': budget_vo.data_source_id, - 'domain_id': budget_vo.domain_id + "budget_id": budget_vo.budget_id, + "name": budget_vo.name, + "date": planned_limit["date"], + "cost": 0, + "limit": planned_limit.limit, + "currency": budget_vo.currency, + "provider_filter": budget_vo.provider_filter.to_dict(), + "budget": budget_vo, + "project_id": budget_vo.project_id, + "project_group_id": budget_vo.project_group_id, + "data_source_id": budget_vo.data_source_id, + "domain_id": budget_vo.domain_id, } - self.budget_usage_model.create(budget_usage_data) + budget_usage_vo = self.budget_usage_model.create(budget_usage_data) def 
update_budget_usage_by_vo(self, params, budget_usage_vo): def _rollback(old_data): - _LOGGER.info(f'[update_budget_usage_by_vo._rollback] Revert Data : ' - f'{old_data["budget_id"]} / {old_data["date"]}') + _LOGGER.info( + f"[update_budget_usage_by_vo._rollback] Revert Data : " + f'{old_data["budget_id"]} / {old_data["date"]}' + ) budget_usage_vo.update(old_data) self.transaction.add_rollback(_rollback, budget_usage_vo.to_dict()) return budget_usage_vo.update(params) - def update_cost_usage(self, budget_id, domain_id): - _LOGGER.info(f'[update_cost_usage] Update Budget Usage: {budget_id}') - cost_mgr: CostManager = self.locator.get_manager('CostManager') + def update_cost_usage(self, budget_id, workspace_id, domain_id): + _LOGGER.info(f"[update_cost_usage] Update Budget Usage: {budget_id}") + cost_mgr: CostManager = self.locator.get_manager("CostManager") - budget_vo = self.budget_mgr.get_budget(budget_id, domain_id) + budget_vo = self.budget_mgr.get_budget(budget_id, domain_id, workspace_id) self._update_monthly_budget_usage(budget_vo, cost_mgr) def update_budget_usage(self, domain_id, data_source_id): - budget_vos = self.budget_mgr.filter_budgets(domain_id=domain_id, data_source_id=data_source_id) + budget_vos = self.budget_mgr.filter_budgets( + domain_id=domain_id, data_source_id=data_source_id + ) for budget_vo in budget_vos: self.update_cost_usage(budget_vo.budget_id, domain_id) self.notify_budget_usage(budget_vo) @@ -95,7 +102,7 @@ def update_budget_usage(self, domain_id, data_source_id): def notify_budget_usage(self, budget_vo: Budget): budget_id = budget_vo.budget_id domain_id = budget_vo.domain_id - current_month = datetime.now().strftime('%Y-%m') + current_month = datetime.now().strftime("%Y-%m") updated_notifications = [] is_changed = False for notification in budget_vo.notifications: @@ -105,23 +112,30 @@ def notify_budget_usage(self, budget_vo: Budget): notification_type = notification.notification_type is_notify = False - if budget_vo.time_unit == 
'MONTHLY': - budget_usage_vos = self.filter_budget_usages(budget_id=budget_id, domain_id=domain_id) - total_budget_usage = sum([budget_usage_vo.cost for budget_usage_vo in budget_usage_vos]) + if budget_vo.time_unit == "MONTHLY": + budget_usage_vos = self.filter_budget_usages( + budget_id=budget_id, domain_id=domain_id + ) + total_budget_usage = sum( + [budget_usage_vo.cost for budget_usage_vo in budget_usage_vos] + ) budget_limit = budget_vo.limit else: - budget_usage_vos = self.filter_budget_usages(budget_id=budget_id, domain_id=domain_id, - date=current_month) + budget_usage_vos = self.filter_budget_usages( + budget_id=budget_id, domain_id=domain_id, date=current_month + ) total_budget_usage = budget_usage_vos[0].cost budget_limit = budget_usage_vos[0].limit if budget_limit == 0: - _LOGGER.debug(f'[notify_budget_usage] budget_limit is 0: {budget_id}') + _LOGGER.debug( + f"[notify_budget_usage] budget_limit is 0: {budget_id}" + ) continue budget_percentage = round(total_budget_usage / budget_limit * 100, 2) - if unit == 'PERCENT': + if unit == "PERCENT": if budget_percentage > threshold: is_notify = True is_changed = True @@ -131,26 +145,45 @@ def notify_budget_usage(self, budget_vo: Budget): is_changed = True if is_notify: - _LOGGER.debug(f'[notify_budget_usage] notify event: {budget_id} (level: {notification_type})') + _LOGGER.debug( + f"[notify_budget_usage] notify event: {budget_id} (level: {notification_type})" + ) try: - self._notify_message(budget_vo, current_month, total_budget_usage, budget_limit, - budget_percentage, threshold, unit, notification_type) - - updated_notifications.append({ - 'threshold': threshold, - 'unit': unit, - 'notification_type': notification_type, - 'notified_months': notification.notified_months + [current_month] - }) + self._notify_message( + budget_vo, + current_month, + total_budget_usage, + budget_limit, + budget_percentage, + threshold, + unit, + notification_type, + ) + + updated_notifications.append( + { + "threshold": 
threshold, + "unit": unit, + "notification_type": notification_type, + "notified_months": notification.notified_months + + [current_month], + } + ) except Exception as e: - _LOGGER.error(f'[notify_budget_usage] Failed to notify message ({budget_id}): {e}') + _LOGGER.error( + f"[notify_budget_usage] Failed to notify message ({budget_id}): {e}" + ) else: - if unit == 'PERCENT': - _LOGGER.debug(f'[notify_budget_usage] skip notification: {budget_id} ' - f'(usage percent: {budget_percentage}%, threshold: {threshold}%)') + if unit == "PERCENT": + _LOGGER.debug( + f"[notify_budget_usage] skip notification: {budget_id} " + f"(usage percent: {budget_percentage}%, threshold: {threshold}%)" + ) else: - _LOGGER.debug(f'[notify_budget_usage] skip notification: {budget_id} ' - f'(usage cost: {total_budget_usage}, threshold: {threshold})') + _LOGGER.debug( + f"[notify_budget_usage] skip notification: {budget_id} " + f"(usage cost: {total_budget_usage}, threshold: {threshold})" + ) updated_notifications.append(notification.to_dict()) @@ -158,76 +191,81 @@ def notify_budget_usage(self, budget_vo: Budget): updated_notifications.append(notification.to_dict()) if is_changed: - budget_vo.update({'notifications': updated_notifications}) - - def _notify_message(self, budget_vo: Budget, current_month, total_budget_usage, budget_limit, budget_percentage, - threshold, unit, notification_type): - data_source_name = self.data_source_mgr.get_data_source(budget_vo.data_source_id, budget_vo.domain_id).name - project_name = self.identity_mgr.get_project_name(budget_vo.project_id, budget_vo.domain_id) - - if unit == 'PERCENT': - threshold_str = f'{int(threshold)}%' + budget_vo.update({"notifications": updated_notifications}) + + def _notify_message( + self, + budget_vo: Budget, + current_month, + total_budget_usage, + budget_limit, + budget_percentage, + threshold, + unit, + notification_type, + ): + data_source_name = self.data_source_mgr.get_data_source( + budget_vo.data_source_id, 
budget_vo.domain_id + ).name + project_name = self.identity_mgr.get_project_name( + budget_vo.project_id, budget_vo.domain_id + ) + + if unit == "PERCENT": + threshold_str = f"{int(threshold)}%" else: - threshold_str = format(int(threshold), ',') - - description = f'Please check the budget usage and increase the budget limit if necessary.\n\n' - description += (f'Budget Usage (Currency: {budget_vo.currency}): \n' - f'- Usage Cost: {format(round(total_budget_usage, 2), ",")}\n' - f'- Limit: {format(budget_limit, ",")}\n' - f'- Percentage: {budget_percentage}%\n' - f'- Threshold: > {threshold_str}\n') - - if budget_vo.time_unit == 'MONTHLY': - period = f'{current_month} ~ {current_month}' + threshold_str = format(int(threshold), ",") + + description = f"Please check the budget usage and increase the budget limit if necessary.\n\n" + description += ( + f"Budget Usage (Currency: {budget_vo.currency}): \n" + f'- Usage Cost: {format(round(total_budget_usage, 2), ",")}\n' + f'- Limit: {format(budget_limit, ",")}\n' + f"- Percentage: {budget_percentage}%\n" + f"- Threshold: > {threshold_str}\n" + ) + + if budget_vo.time_unit == "MONTHLY": + period = f"{current_month} ~ {current_month}" else: - period = f'{budget_vo.start} ~ {budget_vo.end}' + period = f"{budget_vo.start} ~ {budget_vo.end}" message = { - 'resource_type': 'identity.Project', - 'resource_id': budget_vo.project_id, - 'notification_type': 'WARNING' if notification_type == 'WARNING' else 'ERROR', - 'topic': 'cost_analysis.Budget', - 'message': { - 'title': f'Budget usage exceeded - {budget_vo.name}', - 'description': description, - 'tags': [ + "resource_type": "identity.Project", + "resource_id": budget_vo.project_id, + "notification_type": "WARNING" + if notification_type == "WARNING" + else "ERROR", + "topic": "cost_analysis.Budget", + "message": { + "title": f"Budget usage exceeded - {budget_vo.name}", + "description": description, + "tags": [ { - 'key': 'Budget ID', - 'value': budget_vo.budget_id, - 
'options': { - 'short': True - } + "key": "Budget ID", + "value": budget_vo.budget_id, + "options": {"short": True}, }, { - 'key': 'Budget Name', - 'value': budget_vo.name, - 'options': { - 'short': True - } + "key": "Budget Name", + "value": budget_vo.name, + "options": {"short": True}, }, { - 'key': 'Data Source', - 'value': data_source_name, - 'options': { - 'short': True - } + "key": "Data Source", + "value": data_source_name, + "options": {"short": True}, }, + {"key": "Period", "value": period, "options": {"short": True}}, { - 'key': 'Period', - 'value': period, - 'options': { - 'short': True - } + "key": "Project", + "value": project_name, }, - { - 'key': 'Project', - 'value': project_name, - } ], - 'occurred_at': utils.datetime_to_iso8601(datetime.utcnow()), + "occurred_at": utils.datetime_to_iso8601(datetime.utcnow()), }, - 'notification_level': 'ALL', - 'domain_id': budget_vo.domain_id + "notification_level": "ALL", + "domain_id": budget_vo.domain_id, } self.notification_mgr.create_notification(message) @@ -242,59 +280,66 @@ def stat_budget_usages(self, query): return self.budget_usage_model.stat(**query) def analyze_budget_usages(self, query): - query['date_field'] = 'date' - query['date_field_format'] = '%Y-%m' + query["date_field"] = "date" + query["date_field_format"] = "%Y-%m" return self.budget_usage_model.analyze(**query) def _update_monthly_budget_usage(self, budget_vo: Budget, cost_mgr: CostManager): update_data = {} query = self._make_cost_analyze_query(budget_vo) - _LOGGER.debug(f'[_update_monthly_budget_usage]: query: {query}') + _LOGGER.debug(f"[_update_monthly_budget_usage]: query: {query}") - result = cost_mgr.analyze_costs_by_granularity(query, budget_vo.domain_id, budget_vo.data_source_id) - for cost_usage_data in result.get('results', []): - if date := cost_usage_data.get('date'): - update_data[date] = cost_usage_data.get('cost', 0) + result = cost_mgr.analyze_costs_by_granularity( + query, budget_vo.domain_id, budget_vo.data_source_id + 
) + for cost_usage_data in result.get("results", []): + if date := cost_usage_data.get("date"): + update_data[date] = cost_usage_data.get("cost", 0) budget_usage_vos = self.budget_usage_model.filter(budget_id=budget_vo.budget_id) for budget_usage_vo in budget_usage_vos: if budget_usage_vo.date in update_data: - budget_usage_vo.update({'cost': update_data[budget_usage_vo.date]}) + budget_usage_vo.update({"cost": update_data[budget_usage_vo.date]}) else: - budget_usage_vo.update({'cost': 0}) + budget_usage_vo.update({"cost": 0}) def _make_cost_analyze_query(self, budget_vo: Budget): query = { - 'granularity': 'MONTHLY', - 'start': budget_vo.start, - 'end': budget_vo.end, - 'fields': { - 'cost': { - 'key': 'cost', - 'operator': 'sum' - } - }, - 'filter': [ - {'k': 'domain_id', 'v': budget_vo.domain_id, 'o': 'eq'}, - {'k': 'data_source_id', 'v': budget_vo.data_source_id, 'o': 'eq'}, - ] + "granularity": "MONTHLY", + "start": budget_vo.start, + "end": budget_vo.end, + "fields": {"cost": {"key": "cost", "operator": "sum"}}, + "filter": [ + {"k": "domain_id", "v": budget_vo.domain_id, "o": "eq"}, + {"k": "data_source_id", "v": budget_vo.data_source_id, "o": "eq"}, + ], } if budget_vo.project_id: - query['filter'].append({'k': 'project_id', 'v': budget_vo.project_id, 'o': 'eq'}) + query["filter"].append( + {"k": "project_id", "v": budget_vo.project_id, "o": "eq"} + ) else: - identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager') - response = identity_mgr.list_projects_in_project_group(budget_vo.project_group_id, - budget_vo.domain_id, True) + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") + + query = { + "filter": [ + {"k": "project_id", "v": budget_vo.project_id, "o": "eq"}, + {"k": "domain_id", "v": budget_vo.domain_id, "o": "eq"}, + ] + } + project_vos = identity_mgr.list_projects(query) project_ids = [] - for project_info in response.get('results', []): - project_ids.append(project_info['project_id']) + for project_vo 
in project_vos: + project_ids.append(project_vo.project_id) - query['filter'].append({'k': 'project_id', 'v': project_ids, 'o': 'in'}) + query["filter"].append({"k": "project_id", "v": project_ids, "o": "in"}) - if budget_vo.provider_filter and budget_vo.provider_filter.state == 'ENABLED': - query['filter'].append({'k': 'provider', 'v': budget_vo.provider_filter.providers, 'o': 'in'}) + if budget_vo.provider_filter and budget_vo.provider_filter.state == "ENABLED": + query["filter"].append( + {"k": "provider", "v": budget_vo.provider_filter.providers, "o": "in"} + ) return query diff --git a/src/spaceone/cost_analysis/manager/cost_manager.py b/src/spaceone/cost_analysis/manager/cost_manager.py index 4a0ad525..1efb8a6f 100644 --- a/src/spaceone/cost_analysis/manager/cost_manager.py +++ b/src/spaceone/cost_analysis/manager/cost_manager.py @@ -7,44 +7,53 @@ from spaceone.core.manager import BaseManager from spaceone.cost_analysis.error import * from spaceone.cost_analysis.model.cost_model import Cost, MonthlyCost, CostQueryHistory -from spaceone.cost_analysis.manager.data_source_rule_manager import DataSourceRuleManager +from spaceone.cost_analysis.manager.data_source_rule_manager import ( + DataSourceRuleManager, +) from spaceone.cost_analysis.manager.identity_manager import IdentityManager _LOGGER = logging.getLogger(__name__) class CostManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.cost_model: Cost = self.locator.get_model('Cost') - self.monthly_cost_model: MonthlyCost = self.locator.get_model('MonthlyCost') - self.cost_query_history_model: CostQueryHistory = self.locator.get_model('CostQueryHistory') - self.data_source_rule_mgr: DataSourceRuleManager = self.locator.get_manager('DataSourceRuleManager') + self.cost_model: Cost = self.locator.get_model("Cost") + self.monthly_cost_model: MonthlyCost = self.locator.get_model("MonthlyCost") + self.cost_query_history_model: CostQueryHistory = self.locator.get_model( + 
"CostQueryHistory" + ) + self.data_source_rule_mgr: DataSourceRuleManager = self.locator.get_manager( + "DataSourceRuleManager" + ) self.project_group_map = None def create_cost(self, params, execute_rollback=True): def _rollback(cost_vo): - _LOGGER.info(f'[create_cost._rollback] ' - f'Delete cost : {cost_vo.name} ' - f'({cost_vo.cost_id})') + _LOGGER.info( + f"[create_cost._rollback] " + f"Delete cost : {cost_vo.name} " + f"({cost_vo.cost_id})" + ) cost_vo.delete() if self.project_group_map is None: - self.project_group_map = self._get_project_group_map(params['domain_id']) + self.project_group_map = self._get_project_group_map(params["domain_id"]) - if 'region_code' in params and 'provider' in params: - params['region_key'] = f'{params["provider"]}.{params["region_code"]}' + if "region_code" in params and "provider" in params: + params["region_key"] = f'{params["provider"]}.{params["region_code"]}' - billed_at = self._get_billed_at_from_billed_date(params['billed_date']) + billed_at = self._get_billed_at_from_billed_date(params["billed_date"]) - params['billed_year'] = billed_at.strftime('%Y') - params['billed_month'] = billed_at.strftime('%Y-%m') + params["billed_year"] = billed_at.strftime("%Y") + params["billed_month"] = billed_at.strftime("%Y-%m") params = self.data_source_rule_mgr.change_cost_data(params) - if 'project_id' in params: - params['project_group_id'] = self.project_group_map.get(params['project_id']) + if "project_id" in params: + params["project_group_id"] = self.project_group_map.get( + params["project_id"] + ) cost_vo: Cost = self.cost_model.create(params) @@ -65,18 +74,28 @@ def delete_cost_by_vo(cost_vo: Cost): cost_vo.delete() def delete_cost_with_datasource(self, domain_id, data_source_id): - _LOGGER.debug(f'[delete_cost_with_datasource] data_source_id: {data_source_id}') - cost_vos = self.cost_model.filter(domain_id=domain_id, data_source_id=data_source_id) + _LOGGER.debug(f"[delete_cost_with_datasource] data_source_id: {data_source_id}") 
+ cost_vos = self.cost_model.filter( + domain_id=domain_id, data_source_id=data_source_id + ) cost_vos.delete() - monthly_cost_vos = self.monthly_cost_model.filter(domain_id=domain_id, data_source_id=data_source_id) + monthly_cost_vos = self.monthly_cost_model.filter( + domain_id=domain_id, data_source_id=data_source_id + ) monthly_cost_vos.delete() - history_vos = self.cost_query_history_model.filter(domain_id=domain_id, data_source_id=data_source_id) + history_vos = self.cost_query_history_model.filter( + domain_id=domain_id, data_source_id=data_source_id + ) history_vos.delete() - def get_cost(self, cost_id, domain_id, only=None): - return self.cost_model.get(cost_id=cost_id, domain_id=domain_id, only=only) + def get_cost(self, cost_id, domain_id, workspace_id=None): + conditions = {"cost_id": cost_id, "domain_id": domain_id} + + if workspace_id: + conditions["workspace_id"] = workspace_id + return self.cost_model.get(**conditions) def filter_costs(self, **conditions): return self.cost_model.filter(**conditions) @@ -96,165 +115,217 @@ def list_monthly_costs(self, query={}): def stat_monthly_costs(self, query): return self.monthly_cost_model.stat(**query) - def analyze_costs(self, query, target='SECONDARY_PREFERRED'): - query['target'] = target - query['date_field'] = 'billed_date' - query['date_field_format'] = '%Y-%m-%d' - _LOGGER.debug(f'[analyze_costs] query: {query}') + def analyze_costs(self, query, target="SECONDARY_PREFERRED"): + query["target"] = target + query["date_field"] = "billed_date" + query["date_field_format"] = "%Y-%m-%d" + _LOGGER.debug(f"[analyze_costs] query: {query}") return self.cost_model.analyze(**query) - def analyze_monthly_costs(self, query, target='SECONDARY_PREFERRED'): - query['target'] = target - query['date_field'] = 'billed_month' - query['date_field_format'] = '%Y-%m' - _LOGGER.debug(f'[analyze_monthly_costs] query: {query}') + def analyze_monthly_costs(self, query, target="SECONDARY_PREFERRED"): + query["target"] = target + 
query["date_field"] = "billed_month" + query["date_field_format"] = "%Y-%m" + _LOGGER.debug(f"[analyze_monthly_costs] query: {query}") return self.monthly_cost_model.analyze(**query) - def analyze_yearly_costs(self, query, target='SECONDARY_PREFERRED'): - query['target'] = target - query['date_field'] = 'billed_year' - query['date_field_format'] = '%Y' - _LOGGER.debug(f'[analyze_yearly_costs] query: {query}') + def analyze_yearly_costs(self, query, target="SECONDARY_PREFERRED"): + query["target"] = target + query["date_field"] = "billed_year" + query["date_field_format"] = "%Y" + _LOGGER.debug(f"[analyze_yearly_costs] query: {query}") return self.monthly_cost_model.analyze(**query) - @cache.cacheable(key='stat-costs:monthly:{domain_id}:{domain_id}:{query_hash}', expire=3600 * 24) - def stat_monthly_costs_with_cache(self, query, query_hash, domain_id, data_source_id): + @cache.cacheable( + key="stat-costs:monthly:{domain_id}:{domain_id}:{query_hash}", expire=3600 * 24 + ) + def stat_monthly_costs_with_cache( + self, query, query_hash, domain_id, data_source_id + ): return self.stat_monthly_costs(query) - @cache.cacheable(key='analyze-costs:daily:{domain_id}:{data_source_id}:{query_hash}', expire=3600 * 24) - def analyze_costs_with_cache(self, query, query_hash, domain_id, data_source_id, target='SECONDARY_PREFERRED'): + @cache.cacheable( + key="analyze-costs:daily:{domain_id}:{data_source_id}:{query_hash}", + expire=3600 * 24, + ) + def analyze_costs_with_cache( + self, query, query_hash, domain_id, data_source_id, target="SECONDARY_PREFERRED" + ): return self.analyze_costs(query, target) - @cache.cacheable(key='analyze-costs:monthly:{domain_id}:{data_source_id}:{query_hash}', expire=3600 * 24) - def analyze_monthly_costs_with_cache(self, query, query_hash, domain_id, data_source_id, target='SECONDARY_PREFERRED'): + @cache.cacheable( + key="analyze-costs:monthly:{domain_id}:{data_source_id}:{query_hash}", + expire=3600 * 24, + ) + def 
analyze_monthly_costs_with_cache( + self, query, query_hash, domain_id, data_source_id, target="SECONDARY_PREFERRED" + ): return self.analyze_monthly_costs(query, target) - @cache.cacheable(key='analyze-costs:yearly:{domain_id}:{data_source_id}:{query_hash}', expire=3600 * 24) - def analyze_yearly_costs_with_cache(self, query, query_hash, domain_id, data_source_id, target='SECONDARY_PREFERRED'): + @cache.cacheable( + key="analyze-costs:yearly:{domain_id}:{data_source_id}:{query_hash}", + expire=3600 * 24, + ) + def analyze_yearly_costs_with_cache( + self, query, query_hash, domain_id, data_source_id, target="SECONDARY_PREFERRED" + ): return self.analyze_yearly_costs(query, target) def analyze_costs_by_granularity(self, query, domain_id, data_source_id): self._check_date_range(query) - granularity = query['granularity'] + granularity = query["granularity"] # Save query history to speed up data loading query_hash = utils.dict_to_hash(query) self.create_cost_query_history(query, query_hash, domain_id, data_source_id) - if granularity == 'DAILY': - response = self.analyze_costs_with_cache(query, query_hash, domain_id, data_source_id) - elif granularity == 'MONTHLY': - response = self.analyze_monthly_costs_with_cache(query, query_hash, domain_id, data_source_id) + if granularity == "DAILY": + response = self.analyze_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) + elif granularity == "MONTHLY": + response = self.analyze_monthly_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) else: - response = self.analyze_yearly_costs_with_cache(query, query_hash, domain_id, data_source_id) + response = self.analyze_yearly_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) return response - @cache.cacheable(key='cost-query-history:{domain_id}:{data_source_id}:{query_hash}', expire=600) + @cache.cacheable( + key="cost-query-history:{domain_id}:{data_source_id}:{query_hash}", expire=600 + ) def create_cost_query_history(self, 
query, query_hash, domain_id, data_source_id): def _rollback(history_vo): - _LOGGER.info(f'[create_cost_query_history._rollback] Delete cost query history: {query_hash}') + _LOGGER.info( + f"[create_cost_query_history._rollback] Delete cost query history: {query_hash}" + ) history_vo.delete() - history_model: CostQueryHistory = self.locator.get_model('CostQueryHistory') + history_model: CostQueryHistory = self.locator.get_model("CostQueryHistory") history_vos = history_model.filter(query_hash=query_hash, domain_id=domain_id) if history_vos.count() == 0: - history_vo = history_model.create({ - 'query_hash': query_hash, - 'query_options': copy.deepcopy(query), - 'data_source_id': data_source_id, - 'domain_id': domain_id - }) + history_vo = history_model.create( + { + "query_hash": query_hash, + "query_options": copy.deepcopy(query), + "data_source_id": data_source_id, + "domain_id": domain_id, + } + ) self.transaction.add_rollback(_rollback, history_vo) else: history_vos[0].update({}) def list_cost_query_history(self, query={}): - history_model: CostQueryHistory = self.locator.get_model('CostQueryHistory') + history_model: CostQueryHistory = self.locator.get_model("CostQueryHistory") return history_model.query(**query) @staticmethod def remove_stat_cache(domain_id, data_source_id): - cache.delete_pattern(f'analyze-costs:*:{domain_id}:{data_source_id}:*') - cache.delete_pattern(f'stat-costs:*:{domain_id}:{data_source_id}:*') - cache.delete_pattern(f'cost-query-history:{domain_id}:{data_source_id}:*') + cache.delete_pattern(f"analyze-costs:*:{domain_id}:{data_source_id}:*") + cache.delete_pattern(f"stat-costs:*:{domain_id}:{data_source_id}:*") + cache.delete_pattern(f"cost-query-history:{domain_id}:{data_source_id}:*") def _check_date_range(self, query): - start_str = query.get('start') - end_str = query.get('end') - granularity = query.get('granularity') + start_str = query.get("start") + end_str = query.get("end") + granularity = query.get("granularity") start = 
self._parse_start_time(start_str, granularity) end = self._parse_end_time(end_str, granularity) now = datetime.utcnow().date() if len(start_str) != len(end_str): - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='Start date and end date must be the same format.') + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="Start date and end date must be the same format.", + ) if start >= end: - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='End date must be greater than start date.') + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="End date must be greater than start date.", + ) - if granularity == 'DAILY': + if granularity == "DAILY": if start + relativedelta(months=1) < end: - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='Request up to a maximum of 1 month.') + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="Request up to a maximum of 1 month.", + ) if start + relativedelta(months=12) < now.replace(day=1): - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='For DAILY, you cannot request data older than 1 year.') + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="For DAILY, you cannot request data older than 1 year.", + ) - elif granularity == 'MONTHLY': + elif granularity == "MONTHLY": if start + relativedelta(months=12) < end: - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='Request up to a maximum of 12 months.') + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="Request up to a maximum of 12 months.", + ) if start + relativedelta(months=36) < now.replace(day=1): - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='For MONTHLY, you cannot request data older than 3 years.') - elif granularity == 'YEARLY': + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="For MONTHLY, you 
cannot request data older than 3 years.", + ) + elif granularity == "YEARLY": if start + relativedelta(years=3) < now.replace(month=1, day=1): - raise ERROR_INVALID_DATE_RANGE(start=start_str, end=end_str, - reason='For YEARLY, you cannot request data older than 3 years.') + raise ERROR_INVALID_DATE_RANGE( + start=start_str, + end=end_str, + reason="For YEARLY, you cannot request data older than 3 years.", + ) def _parse_start_time(self, date_str, granularity): - return self._convert_date_from_string(date_str.strip(), 'start', granularity) + return self._convert_date_from_string(date_str.strip(), "start", granularity) def _parse_end_time(self, date_str, granularity): - end = self._convert_date_from_string(date_str.strip(), 'end', granularity) + end = self._convert_date_from_string(date_str.strip(), "end", granularity) - if granularity == 'YEARLY': + if granularity == "YEARLY": return end + relativedelta(years=1) - elif granularity == 'MONTHLY': + elif granularity == "MONTHLY": return end + relativedelta(months=1) else: return end + relativedelta(days=1) @staticmethod def _convert_date_from_string(date_str, key, granularity): - if granularity == 'YEARLY': - date_format = '%Y' - date_type = 'YYYY' - elif granularity == 'MONTHLY': + if granularity == "YEARLY": + date_format = "%Y" + date_type = "YYYY" + elif granularity == "MONTHLY": if len(date_str) == 4: - date_format = '%Y' - date_type = 'YYYY' + date_format = "%Y" + date_type = "YYYY" else: - date_format = '%Y-%m' - date_type = 'YYYY-MM' + date_format = "%Y-%m" + date_type = "YYYY-MM" else: if len(date_str) == 4: - date_format = '%Y' - date_type = 'YYYY' + date_format = "%Y" + date_type = "YYYY" elif len(date_str) == 7: - date_format = '%Y-%m' - date_type = 'YYYY-MM' + date_format = "%Y-%m" + date_type = "YYYY-MM" else: - date_format = '%Y-%m-%d' - date_type = 'YYYY-MM-DD' + date_format = "%Y-%m-%d" + date_type = "YYYY-MM-DD" try: return datetime.strptime(date_str, date_format).date() @@ -263,19 +334,23 @@ def 
_convert_date_from_string(date_str, key, granularity): @staticmethod def _get_billed_at_from_billed_date(billed_date): - date_format = '%Y-%m-%d' + date_format = "%Y-%m-%d" try: return datetime.strptime(billed_date, date_format) except Exception as e: - raise ERROR_INVALID_PARAMETER_TYPE(key='billed_date', type='YYYY-MM-DD') + raise ERROR_INVALID_PARAMETER_TYPE(key="billed_date", type="YYYY-MM-DD") - @cache.cacheable(key='project-group-map:{domain_id}', expire=600) + @cache.cacheable(key="project-group-map:{domain_id}", expire=600) def _get_project_group_map(self, domain_id): project_group_map = {} - identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager') - response = identity_mgr.list_projects({'only': ['project_id', 'project_group_id']}, domain_id) - for project_info in response.get('results', []): - project_group_map[project_info['project_id']] = project_info['project_group_info']['project_group_id'] + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") + response = identity_mgr.list_projects( + {"only": ["project_id", "project_group_id"]}, domain_id + ) + for project_info in response.get("results", []): + project_group_map[project_info["project_id"]] = project_info[ + "project_group_info" + ]["project_group_id"] return project_group_map diff --git a/src/spaceone/cost_analysis/manager/cost_query_set_manager.py b/src/spaceone/cost_analysis/manager/cost_query_set_manager.py index fbcbcf8a..a30f359c 100644 --- a/src/spaceone/cost_analysis/manager/cost_query_set_manager.py +++ b/src/spaceone/cost_analysis/manager/cost_query_set_manager.py @@ -7,16 +7,17 @@ class CostQuerySetManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.cost_query_set_model: CostQuerySet = self.locator.get_model('CostQuerySet') + self.cost_query_set_model: CostQuerySet = self.locator.get_model("CostQuerySet") def create_cost_query_set(self, params): def _rollback(cost_query_set_vo): - 
_LOGGER.info(f'[create_cost_query_set._rollback] ' - f'Delete cost_query_set : {cost_query_set_vo.name} ' - f'({cost_query_set_vo.cost_query_set_id})') + _LOGGER.info( + f"[create_cost_query_set._rollback] " + f"Delete cost_query_set : {cost_query_set_vo.name} " + f"({cost_query_set_vo.cost_query_set_id})" + ) cost_query_set_vo.delete() cost_query_set_vo: CostQuerySet = self.cost_query_set_model.create(params) @@ -25,24 +26,32 @@ def _rollback(cost_query_set_vo): return cost_query_set_vo def update_cost_query_set(self, params): - cost_query_set_vo: CostQuerySet = self.get_cost_query_set(params['cost_query_set_id'], params['domain_id']) + cost_query_set_vo: CostQuerySet = self.get_cost_query_set( + params["cost_query_set_id"], params["domain_id"] + ) return self.update_cost_query_set_by_vo(params, cost_query_set_vo) def update_cost_query_set_by_vo(self, params, cost_query_set_vo): def _rollback(old_data): - _LOGGER.info(f'[update_cost_query_set_by_vo._rollback] Revert Data : ' - f'{old_data["cost_query_set_id"]}') + _LOGGER.info( + f"[update_cost_query_set_by_vo._rollback] Revert Data : " + f'{old_data["cost_query_set_id"]}' + ) cost_query_set_vo.update(old_data) self.transaction.add_rollback(_rollback, cost_query_set_vo.to_dict()) return cost_query_set_vo.update(params) def delete_cost_query_set(self, cost_query_set_id, domain_id): - cost_query_set_vo: CostQuerySet = self.get_cost_query_set(cost_query_set_id, domain_id) + cost_query_set_vo: CostQuerySet = self.get_cost_query_set( + cost_query_set_id, domain_id + ) cost_query_set_vo.delete() - def get_cost_query_set(self, cost_query_set_id, domain_id, only=None): - return self.cost_query_set_model.get(cost_query_set_id=cost_query_set_id, domain_id=domain_id, only=only) + def get_cost_query_set(self, cost_query_set_id, user_id, domain_id): + return self.cost_query_set_model.get( + cost_query_set_id=cost_query_set_id, user_id=user_id, domain_id=domain_id + ) def list_cost_query_sets(self, query={}): return 
self.cost_query_set_model.query(**query) diff --git a/src/spaceone/cost_analysis/manager/data_source_manager.py b/src/spaceone/cost_analysis/manager/data_source_manager.py index 81dd1fe9..676d2b43 100644 --- a/src/spaceone/cost_analysis/manager/data_source_manager.py +++ b/src/spaceone/cost_analysis/manager/data_source_manager.py @@ -7,16 +7,17 @@ class DataSourceManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.data_source_model: DataSource = self.locator.get_model('DataSource') + self.data_source_model: DataSource = self.locator.get_model("DataSource") def register_data_source(self, params): def _rollback(data_source_vo): - _LOGGER.info(f'[register_data_source._rollback] ' - f'Delete data source : {data_source_vo.name} ' - f'({data_source_vo.data_source_id})') + _LOGGER.info( + f"[register_data_source._rollback] " + f"Delete data source : {data_source_vo.name} " + f"({data_source_vo.data_source_id})" + ) data_source_vo.delete() data_source_vo: DataSource = self.data_source_model.create(params) @@ -25,13 +26,17 @@ def _rollback(data_source_vo): return data_source_vo def update_data_source(self, params): - data_source_vo: DataSource = self.get_data_source(params['data_source_id'], params['domain_id']) + data_source_vo: DataSource = self.get_data_source( + params["data_source_id"], params["domain_id"] + ) return self.update_data_source_by_vo(params, data_source_vo) def update_data_source_by_vo(self, params, data_source_vo): def _rollback(old_data): - _LOGGER.info(f'[update_data_source_by_vo._rollback] Revert Data : ' - f'{old_data["data_source_id"]}') + _LOGGER.info( + f"[update_data_source_by_vo._rollback] Revert Data : " + f'{old_data["data_source_id"]}' + ) data_source_vo.update(old_data) self.transaction.add_rollback(_rollback, data_source_vo.to_dict()) @@ -45,8 +50,12 @@ def deregister_data_source(self, data_source_id, domain_id): def deregister_data_source_by_vo(data_source_vo): data_source_vo.delete() - def 
get_data_source(self, data_source_id, domain_id, only=None): - return self.data_source_model.get(data_source_id=data_source_id, domain_id=domain_id, only=only) + def get_data_source(self, data_source_id, domain_id, workspace_id=None): + conditions = {"data_source_id": data_source_id, "domain_id": domain_id} + + if workspace_id: + conditions["workspace_id"] = workspace_id + return self.data_source_model.get(**conditions) def filter_data_sources(self, **conditions): return self.data_source_model.filter(**conditions) diff --git a/src/spaceone/cost_analysis/manager/data_source_rule_manager.py b/src/spaceone/cost_analysis/manager/data_source_rule_manager.py index 271e4cf1..2a07523a 100644 --- a/src/spaceone/cost_analysis/manager/data_source_rule_manager.py +++ b/src/spaceone/cost_analysis/manager/data_source_rule_manager.py @@ -4,26 +4,32 @@ from spaceone.core import utils from spaceone.core.manager import BaseManager from spaceone.cost_analysis.manager.identity_manager import IdentityManager -from spaceone.cost_analysis.model.data_source_rule_model import DataSourceRule, DataSourceRuleCondition +from spaceone.cost_analysis.model.data_source_rule_model import ( + DataSourceRule, + DataSourceRuleCondition, +) _LOGGER = logging.getLogger(__name__) class DataSourceRuleManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.data_source_rule_model: DataSourceRule = self.locator.get_model('DataSourceRule') - self.identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager') + self.data_source_rule_model: DataSourceRule = self.locator.get_model( + "DataSourceRule" + ) + self.identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") self._project_info = {} self._service_account_info = {} self._data_source_rule_info = {} def create_data_source_rule(self, params): def _rollback(data_source_rule_vo: DataSourceRule): - _LOGGER.info(f'[create_data_source_rule._rollback] ' - f'Delete event rule : 
{data_source_rule_vo.name} ' - f'({data_source_rule_vo.data_source_rule_id})') + _LOGGER.info( + f"[create_data_source_rule._rollback] " + f"Delete event rule : {data_source_rule_vo.name} " + f"({data_source_rule_vo.data_source_rule_id})" + ) data_source_rule_vo.delete() data_source_rule_vo: DataSourceRule = self.data_source_rule_model.create(params) @@ -32,14 +38,17 @@ def _rollback(data_source_rule_vo: DataSourceRule): return data_source_rule_vo def update_data_source_rule(self, params): - data_source_rule_vo: DataSourceRule = self.get_data_source_rule(params['data_source_rule_id'], - params['domain_id']) + data_source_rule_vo: DataSourceRule = self.get_data_source_rule( + params["data_source_rule_id"], params["domain_id"] + ) return self.update_data_source_rule_by_vo(params, data_source_rule_vo) def update_data_source_rule_by_vo(self, params, data_source_rule_vo): def _rollback(old_data): - _LOGGER.info(f'[update_data_source_rule_by_vo._rollback] Revert Data : ' - f'{old_data["data_source_rule_id"]}') + _LOGGER.info( + f"[update_data_source_rule_by_vo._rollback] Revert Data : " + f'{old_data["data_source_rule_id"]}' + ) data_source_rule_vo.update(old_data) self.transaction.add_rollback(_rollback, data_source_rule_vo.to_dict()) @@ -47,14 +56,24 @@ def _rollback(old_data): return data_source_rule_vo.update(params) def delete_data_source_rule(self, data_source_rule_id, domain_id): - data_source_rule_vo: DataSourceRule = self.get_data_source_rule(data_source_rule_id, domain_id) + data_source_rule_vo: DataSourceRule = self.get_data_source_rule( + data_source_rule_id, domain_id + ) self.delete_data_source_rule_by_vo(data_source_rule_vo) def delete_data_source_rule_by_vo(self, data_source_rule_vo): data_source_rule_vo.delete() - def get_data_source_rule(self, data_source_rule_id, domain_id, only=None): - return self.data_source_rule_model.get(data_source_rule_id=data_source_rule_id, domain_id=domain_id, only=only) + def get_data_source_rule(self, data_source_rule_id, 
domain_id, workspace_id=None): + conditions = { + "data_source_rule_id": data_source_rule_id, + "domain_id": domain_id, + } + + if workspace_id: + conditions["workspace_id"] = workspace_id + + return self.data_source_rule_model.get(**conditions) def filter_data_source_rules(self, **conditions): return self.data_source_rule_model.filter(**conditions) @@ -66,20 +85,31 @@ def stat_data_source_rules(self, query): return self.data_source_rule_model.stat(**query) def change_cost_data(self, cost_data): - data_source_id = cost_data['data_source_id'] - domain_id = cost_data['domain_id'] - managed_data_source_rule_vos, custom_data_source_rule_vos = self._get_data_source_rules(data_source_id, domain_id) - - cost_data = self._apply_data_source_rule_to_cost_data(cost_data, managed_data_source_rule_vos, domain_id) - cost_data = self._apply_data_source_rule_to_cost_data(cost_data, custom_data_source_rule_vos, domain_id) + data_source_id = cost_data["data_source_id"] + domain_id = cost_data["domain_id"] + ( + managed_data_source_rule_vos, + custom_data_source_rule_vos, + ) = self._get_data_source_rules(data_source_id, domain_id) + + cost_data = self._apply_data_source_rule_to_cost_data( + cost_data, managed_data_source_rule_vos, domain_id + ) + cost_data = self._apply_data_source_rule_to_cost_data( + cost_data, custom_data_source_rule_vos, domain_id + ) return cost_data - def _apply_data_source_rule_to_cost_data(self, cost_data, data_source_rule_vos, domain_id): + def _apply_data_source_rule_to_cost_data( + self, cost_data, data_source_rule_vos, domain_id + ): for data_source_rule_vo in data_source_rule_vos: is_match = self._change_cost_data_by_rule(cost_data, data_source_rule_vo) if is_match: - cost_data = self._change_cost_data_with_actions(cost_data, data_source_rule_vo.actions, domain_id) + cost_data = self._change_cost_data_with_actions( + cost_data, data_source_rule_vo.actions, domain_id + ) if is_match and data_source_rule_vo.options.stop_processing: break @@ -88,88 
+118,111 @@ def _apply_data_source_rule_to_cost_data(self, cost_data, data_source_rule_vos, def _change_cost_data_with_actions(self, cost_data, actions, domain_id): for action, value in actions.items(): - if action == 'change_project' and value: - cost_data['project_id'] = value + if action == "change_project" and value: + cost_data["project_id"] = value - elif action == 'match_project' and value: - source = value['source'] - target_key = value.get('target', 'project_id') + elif action == "match_project" and value: + source = value["source"] + target_key = value.get("target", "project_id") target_value = utils.get_dict_value(cost_data, source) if target_value: - project_info = self._get_project(target_key, target_value, domain_id) + project_info = self._get_project( + target_key, target_value, domain_id + ) if project_info: - cost_data['project_id'] = project_info['project_id'] + cost_data["project_id"] = project_info["project_id"] - elif action == 'match_service_account' and value: - source = value['source'] - target_key = value.get('target', 'service_account_id') + elif action == "match_service_account" and value: + source = value["source"] + target_key = value.get("target", "service_account_id") target_value = utils.get_dict_value(cost_data, source) if target_value: - service_account_info = self._get_service_account(target_key, target_value, domain_id) + service_account_info = self._get_service_account( + target_key, target_value, domain_id + ) if service_account_info: - cost_data['service_account_id'] = service_account_info['service_account_id'] - cost_data['project_id'] = service_account_info.get('project_info', {}).get('project_id') + cost_data["service_account_id"] = service_account_info[ + "service_account_id" + ] + cost_data["project_id"] = service_account_info.get( + "project_info", {} + ).get("project_id") - if action == 'add_additional_info' and value: - cost_data['additional_info'] = cost_data.get('additional_info', {}) - 
cost_data['additional_info'].update(value) + if action == "add_additional_info" and value: + cost_data["additional_info"] = cost_data.get("additional_info", {}) + cost_data["additional_info"].update(value) return cost_data def _get_service_account(self, target_key, target_value, domain_id): - if f'service-account:{domain_id}:{target_key}:{target_value}' in self._service_account_info: - return self._service_account_info[f'service-account:{domain_id}:{target_key}:{target_value}'] + if ( + f"service-account:{domain_id}:{target_key}:{target_value}" + in self._service_account_info + ): + return self._service_account_info[ + f"service-account:{domain_id}:{target_key}:{target_value}" + ] query = { - 'filter': [ - {'k': target_key, 'v': target_value, 'o': 'eq'}, - {'k': 'service_account_type', 'v': 'GENERAL', 'o': 'eq'} + "filter": [ + {"k": target_key, "v": target_value, "o": "eq"}, + {"k": "service_account_type", "v": "GENERAL", "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, ], - 'only': ['service_account_id', 'project_info'] + "only": ["service_account_id", "project_info"], } - response = self.identity_mgr.list_service_accounts(query, domain_id) - results = response.get('results', []) - total_count = response.get('total_count', 0) + response = self.identity_mgr.list_service_accounts(query) + results = response.get("results", []) + total_count = response.get("total_count", 0) service_account_info = None if total_count > 0: service_account_info = results[0] - self._service_account_info[f'service-account:{domain_id}:{target_key}:{target_value}'] = service_account_info + self._service_account_info[ + f"service-account:{domain_id}:{target_key}:{target_value}" + ] = service_account_info return service_account_info def _get_project(self, target_key, target_value, domain_id): - if f'project:{domain_id}:{target_key}:{target_value}' in self._project_info: - return self._project_info[f'project:{domain_id}:{target_key}:{target_value}'] + if 
f"project:{domain_id}:{target_key}:{target_value}" in self._project_info: + return self._project_info[ + f"project:{domain_id}:{target_key}:{target_value}" + ] query = { - 'filter': [ - {'k': target_key, 'v': target_value, 'o': 'eq'} - ], - 'only': ['project_id'] + "filter": [{"k": target_key, "v": target_value, "o": "eq"}], + "only": ["project_id"], } response = self.identity_mgr.list_projects(query, domain_id) - results = response.get('results', []) - total_count = response.get('total_count', 0) + results = response.get("results", []) + total_count = response.get("total_count", 0) project_info = None if total_count > 0: project_info = results[0] - self._project_info[f'project:{domain_id}:{target_key}:{target_value}'] = project_info + self._project_info[ + f"project:{domain_id}:{target_key}:{target_value}" + ] = project_info return project_info def _change_cost_data_by_rule(self, cost_data, data_source_rule_vo: DataSourceRule): conditions_policy = data_source_rule_vo.conditions_policy - if conditions_policy == 'ALWAYS': + if conditions_policy == "ALWAYS": return True else: - results = list(map(functools.partial(self._check_condition, cost_data), data_source_rule_vo.conditions)) - - if conditions_policy == 'ALL': + results = list( + map( + functools.partial(self._check_condition, cost_data), + data_source_rule_vo.conditions, + ) + ) + + if conditions_policy == "ALL": return all(results) else: return any(results) @@ -183,22 +236,22 @@ def _check_condition(cost_data, condition: DataSourceRuleCondition): if cost_value is None: return False - if operator == 'eq': + if operator == "eq": if cost_value == condition_value: return True else: return False - elif operator == 'contain': + elif operator == "contain": if cost_value.lower().find(condition_value.lower()) >= 0: return True else: return False - elif operator == 'not': + elif operator == "not": if cost_value != condition_value: return True else: return False - elif operator == 'not_contain': + elif operator == 
"not_contain": if cost_value.lower().find(condition_value.lower()) < 0: return True else: @@ -208,28 +261,41 @@ def _check_condition(cost_data, condition: DataSourceRuleCondition): def _get_data_source_rules(self, data_source_id, domain_id): if data_source_id in self._data_source_rule_info: - return self._data_source_rule_info[data_source_id].get('managed', []), \ - self._data_source_rule_info[data_source_id].get('custom', []) - - managed_query = self._make_data_source_rule_query(data_source_id, 'MANAGED', domain_id) - managed_data_source_rule_vos, total_count = self.list_data_source_rules(managed_query) - - custom_query = self._make_data_source_rule_query(data_source_id, 'CUSTOM', domain_id) - custom_data_source_rule_vos, total_count = self.list_data_source_rules(custom_query) + return self._data_source_rule_info[data_source_id].get( + "managed", [] + ), self._data_source_rule_info[data_source_id].get("custom", []) + + managed_query = self._make_data_source_rule_query( + data_source_id, "MANAGED", domain_id + ) + managed_data_source_rule_vos, total_count = self.list_data_source_rules( + managed_query + ) + + custom_query = self._make_data_source_rule_query( + data_source_id, "CUSTOM", domain_id + ) + custom_data_source_rule_vos, total_count = self.list_data_source_rules( + custom_query + ) self._data_source_rule_info[data_source_id] = {} - self._data_source_rule_info[data_source_id]['managed'] = managed_data_source_rule_vos - self._data_source_rule_info[data_source_id]['custom'] = custom_data_source_rule_vos + self._data_source_rule_info[data_source_id][ + "managed" + ] = managed_data_source_rule_vos + self._data_source_rule_info[data_source_id][ + "custom" + ] = custom_data_source_rule_vos return managed_data_source_rule_vos, custom_data_source_rule_vos @staticmethod def _make_data_source_rule_query(data_source_id, rule_type, domain_id): return { - 'filter': [ - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': domain_id, 'o': 
'eq'}, - {'k': 'rule_type', 'v': rule_type, 'o': 'eq'} + "filter": [ + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, + {"k": "rule_type", "v": rule_type, "o": "eq"}, ], - 'sort': {'key': 'order'} + "sort": {"key": "order"}, } diff --git a/src/spaceone/cost_analysis/manager/identity_manager.py b/src/spaceone/cost_analysis/manager/identity_manager.py index 1bc45b97..2b4b9358 100644 --- a/src/spaceone/cost_analysis/manager/identity_manager.py +++ b/src/spaceone/cost_analysis/manager/identity_manager.py @@ -1,57 +1,40 @@ -import logging - -from spaceone.core import config from spaceone.core.manager import BaseManager from spaceone.core.connector.space_connector import SpaceConnector -from spaceone.core import cache - -_LOGGER = logging.getLogger(__name__) class IdentityManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.identity_connector: SpaceConnector = self.locator.get_connector('SpaceConnector', service='identity', - token=config.get_global('TOKEN')) - - def list_projects(self, query, domain_id): - return self.identity_connector.dispatch('Project.list', {'query': query, 'domain_id': domain_id}) - - @cache.cacheable(key='project-name:{domain_id}:{project_id}', expire=300) - def get_project_name(self, project_id, domain_id): - try: - project_info = self.get_project(project_id, domain_id) - return f'{project_info["project_group_info"]["name"]} > {project_info["name"]}' - except Exception as e: - _LOGGER.error(f'[get_project_name] API Error: {e}') - return project_id - - def get_project(self, project_id, domain_id): - return self.identity_connector.dispatch('Project.get', {'project_id': project_id, 'domain_id': domain_id}) - - def list_project_groups(self, query, domain_id): - return self.identity_connector.dispatch('ProjectGroup.list', {'query': query, 'domain_id': domain_id}) - - def get_project_group(self, project_group_id, domain_id): - return 
self.identity_connector.dispatch('ProjectGroup.get', {'project_group_id': project_group_id, - 'domain_id': domain_id}) - - def list_projects_in_project_group(self, project_group_id, domain_id, recursive=False, query=None): - request = { - 'project_group_id': project_group_id, - 'domain_id': domain_id, - 'recursive': recursive - } - - if query: - request['query'] = query - - return self.identity_connector.dispatch('ProjectGroup.list_projects', request) - - def get_service_account(self, service_account_id, domain_id): - return self.identity_connector.dispatch('ServiceAccount.get', {'service_account_id': service_account_id, - 'domain_id': domain_id}) - - def list_service_accounts(self, query, domain_id): - return self.identity_connector.dispatch('ServiceAccount.list', {'query': query, 'domain_id': domain_id}) + self.identity_conn: SpaceConnector = self.locator.get_connector( + "SpaceConnector", service="identity" + ) + + def get_workspace(self, workspace_id): + return self.identity_conn.dispatch( + "Workspace.get", + {"workspace_id": workspace_id}, + ) + + def get_trusted_account(self, trusted_account_id): + return self.identity_conn.dispatch( + "TrustedAccount.get", + {"trusted_account_id": trusted_account_id}, + ) + + def list_trusted_accounts(self, query): + return self.identity_conn.dispatch("TrustedAccount.list", {"query": query}) + + def get_service_account(self, service_account_id): + return self.identity_conn.dispatch( + "ServiceAccount.get", + {"service_account_id": service_account_id}, + ) + + def list_service_accounts(self, query): + return self.identity_conn.dispatch("ServiceAccount.list", {"query": query}) + + def get_project(self, project_id): + return self.identity_conn.dispatch("Project.get", {"project_id": project_id}) + + def list_projects(self, query): + return self.identity_conn.dispatch("Project.list", {"query": query}) diff --git a/src/spaceone/cost_analysis/manager/job_manager.py b/src/spaceone/cost_analysis/manager/job_manager.py index 
0f61c491..2e4f751b 100644 --- a/src/spaceone/cost_analysis/manager/job_manager.py +++ b/src/spaceone/cost_analysis/manager/job_manager.py @@ -1,6 +1,6 @@ import logging import copy -from typing import List +from typing import List, Union from datetime import datetime, timedelta from spaceone.core.error import * @@ -14,55 +14,74 @@ class JobManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.job_model: Job = self.locator.get_model('Job') - self.job_timeout = config.get_global('JOB_TIMEOUT', 7200) - self.cost_mgr: CostManager = self.locator.get_manager('CostManager') + self.job_model: Job = self.locator.get_model("Job") + self.job_timeout = config.get_global("JOB_TIMEOUT", 7200) + self.cost_mgr: CostManager = self.locator.get_manager("CostManager") def is_job_running(self, data_source_id, domain_id): - job_vos: List[Job] = self.job_model.filter(data_source_id=data_source_id, domain_id=domain_id, - status='IN_PROGRESS') + job_vos: List[Job] = self.job_model.filter( + data_source_id=data_source_id, domain_id=domain_id, status="IN_PROGRESS" + ) running_job_count = job_vos.count() for job_vo in job_vos: - if datetime.utcnow() > (job_vo.created_at + timedelta(seconds=self.job_timeout)): + if datetime.utcnow() > ( + job_vo.created_at + timedelta(seconds=self.job_timeout) + ): self.change_timeout_status(job_vo) running_job_count -= 1 return running_job_count > 0 - def create_job(self, data_source_id, domain_id, job_options, total_tasks, changed=None): - job_options['no_preload_cache'] = job_options.get('no_preload_cache', False) - job_options['start'] = job_options.get('start') + def create_job( + self, + resource_group: str, + data_source_id: str, + workspace_id: str, + domain_id: str, + job_options: dict, + total_tasks: int, + changed: Union[list, None] = None, + ): + job_options["no_preload_cache"] = job_options.get("no_preload_cache", False) + job_options["start"] = job_options.get("start") data = { - 'data_source_id': 
data_source_id, - 'domain_id': domain_id, - 'options': job_options + "data_source_id": data_source_id, + "resource_group": resource_group, + "workspace_id": workspace_id, + "domain_id": domain_id, + "options": job_options, } if total_tasks: - data.update({ - 'total_tasks': total_tasks, - 'remained_tasks': total_tasks - }) + data.update({"total_tasks": total_tasks, "remained_tasks": total_tasks}) if changed: - data['changed'] = changed + data["changed"] = changed job_vo = self.job_model.create(data) - _LOGGER.debug(f'[create_job] create job: {job_vo.job_id}') + _LOGGER.debug(f"[create_job] create job: {job_vo.job_id}") return job_vo def update_job_by_vo(self, params, job_vo): return job_vo.update(params) - def get_job(self, job_id, domain_id, only=None): - return self.job_model.get(job_id=job_id, domain_id=domain_id, only=only) + def get_job( + self, job_id: str, domain_id: str, workspace_id: Union[str, list] = None + ): + conditions = { + "job_id": job_id, + "domain_id": domain_id, + } + + if workspace_id: + conditions["workspace_id"] = workspace_id + return self.job_model.get(**conditions) def filter_jobs(self, **conditions): return self.job_model.filter(**conditions) @@ -74,22 +93,26 @@ def stat_jobs(self, query): return self.job_model.stat(**query) def preload_cost_stat_queries(self, domain_id, data_source_id): - cost_query_cache_time = config.get_global('COST_QUERY_CACHE_TIME', 4) + cost_query_cache_time = config.get_global("COST_QUERY_CACHE_TIME", 4) cache_time = datetime.utcnow() - timedelta(days=cost_query_cache_time) query = { - 'filter': [ - {'k': 'domain_id', 'v': domain_id, 'o': 'eq'}, - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'updated_at', 'v': cache_time, 'o': 'gte'}, + "filter": [ + {"k": "domain_id", "v": domain_id, "o": "eq"}, + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "updated_at", "v": cache_time, "o": "gte"}, ] } - _LOGGER.debug(f'[_preload_cost_stat_queries] cost_query_cache_time: 
{cost_query_cache_time} days') + _LOGGER.debug( + f"[_preload_cost_stat_queries] cost_query_cache_time: {cost_query_cache_time} days" + ) history_vos, total_count = self.cost_mgr.list_cost_query_history(query) for history_vo in history_vos: - _LOGGER.debug(f'[_preload_cost_stat_queries] create query cache: {history_vo.query_hash}') + _LOGGER.debug( + f"[_preload_cost_stat_queries] create query cache: {history_vo.query_hash}" + ) self._create_cache_by_history(history_vo, domain_id) def _create_cache_by_history(self, history_vo: CostQueryHistory, domain_id): @@ -101,57 +124,61 @@ def _create_cache_by_history(self, history_vo: CostQueryHistory, domain_id): self._create_cache(copy.deepcopy(query), query_hash, domain_id, data_source_id) def _create_cache(self, query, query_hash, domain_id, data_source_id): - if granularity := query.get('granularity'): - if granularity == 'DAILY': - self.cost_mgr.analyze_costs_with_cache(query, query_hash, domain_id, data_source_id) - elif granularity == 'MONTHLY': - self.cost_mgr.analyze_monthly_costs_with_cache(query, query_hash, domain_id, data_source_id) - elif granularity == 'YEARLY': - self.cost_mgr.analyze_yearly_costs_with_cache(query, query_hash, domain_id, data_source_id) + if granularity := query.get("granularity"): + if granularity == "DAILY": + self.cost_mgr.analyze_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) + elif granularity == "MONTHLY": + self.cost_mgr.analyze_monthly_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) + elif granularity == "YEARLY": + self.cost_mgr.analyze_yearly_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) else: - self.cost_mgr.stat_monthly_costs_with_cache(query, query_hash, domain_id, data_source_id) + self.cost_mgr.stat_monthly_costs_with_cache( + query, query_hash, domain_id, data_source_id + ) @staticmethod def decrease_remained_tasks(job_vo: Job): - return job_vo.decrement('remained_tasks', 1) + return 
job_vo.decrement("remained_tasks", 1) @staticmethod def change_success_status(job_vo: Job): - _LOGGER.info(f'[change_success_status] job success: {job_vo.job_id}') + _LOGGER.info(f"[change_success_status] job success: {job_vo.job_id}") - return job_vo.update({ - 'status': 'SUCCESS', - 'finished_at': datetime.utcnow() - }) + return job_vo.update({"status": "SUCCESS", "finished_at": datetime.utcnow()}) @staticmethod def change_canceled_status(job_vo: Job): - _LOGGER.error(f'[change_canceled_status], job canceled ({job_vo.job_id})') + _LOGGER.error(f"[change_canceled_status], job canceled ({job_vo.job_id})") - return job_vo.update({ - 'status': 'CANCELED', - 'finished_at': datetime.utcnow() - }) + return job_vo.update({"status": "CANCELED", "finished_at": datetime.utcnow()}) @staticmethod def change_timeout_status(job_vo: Job): - _LOGGER.error(f'[change_timeout_status] job timeout: {job_vo.job_id}') + _LOGGER.error(f"[change_timeout_status] job timeout: {job_vo.job_id}") - return job_vo.update({ - 'status': 'TIMEOUT', - 'finished_at': datetime.utcnow() - }) + return job_vo.update({"status": "TIMEOUT", "finished_at": datetime.utcnow()}) @staticmethod def change_error_status(job_vo: Job, e): if not isinstance(e, ERROR_BASE): e = ERROR_UNKNOWN(message=str(e)) - _LOGGER.error(f'[change_error_status] job error ({job_vo.job_id}): {e.message}', exc_info=True) - - job_vo.update({ - 'status': 'FAILURE', - 'error_code': e.error_code, - 'error_message': e.message, - 'finished_at': datetime.utcnow() - }) + _LOGGER.error( + f"[change_error_status] job error ({job_vo.job_id}): {e.message}", + exc_info=True, + ) + + job_vo.update( + { + "status": "FAILURE", + "error_code": e.error_code, + "error_message": e.message, + "finished_at": datetime.utcnow(), + } + ) diff --git a/src/spaceone/cost_analysis/manager/job_task_manager.py b/src/spaceone/cost_analysis/manager/job_task_manager.py index 93dd082d..95721e22 100644 --- a/src/spaceone/cost_analysis/manager/job_task_manager.py +++ 
b/src/spaceone/cost_analysis/manager/job_task_manager.py @@ -1,5 +1,6 @@ import logging from datetime import datetime +from typing import Union from spaceone.core import queue, utils from spaceone.core.token import get_token @@ -12,27 +13,33 @@ class JobTaskManager(BaseManager): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.job_mgr: JobManager = self.locator.get_manager('JobManager') - self.job_task_model: JobTask = self.locator.get_model('JobTask') + self.job_mgr: JobManager = self.locator.get_manager("JobManager") + self.job_task_model: JobTask = self.locator.get_model("JobTask") def create_job_task(self, job_id, data_source_id, domain_id, task_options): data = { - 'job_id': job_id, - 'data_source_id': data_source_id, - 'domain_id': domain_id, - 'options': task_options + "job_id": job_id, + "data_source_id": data_source_id, + "domain_id": domain_id, + "options": task_options, } - _LOGGER.debug(f'[create_job_task] create job task: {data}') + _LOGGER.debug(f"[create_job_task] create job task: {data}") return self.job_task_model.create(data) - def get_job_task(self, job_task_id, domain_id, only=None): - return self.job_task_model.get(job_task_id=job_task_id, domain_id=domain_id, only=only) + def get_job_task( + self, job_task_id, domain_id, workspace_id: Union[str, list, None] = None + ): + conditions = {"job_task_id": job_task_id, "domain_id": domain_id} + + if workspace_id: + conditions["workspace_id"] = workspace_id + + return self.job_task_model.get(**conditions) def filter_job_tasks(self, **conditions): return self.job_task_model.filter(**conditions) @@ -46,59 +53,63 @@ def stat_job_tasks(self, query): @staticmethod def push_job_task(params): task = { - 'name': 'sync_data_source', - 'version': 'v1', - 'executionEngine': 'BaseWorker', - 'stages': [{ - 'locator': 'SERVICE', - 'name': 'JobService', - 'metadata': {}, - 'method': 'get_cost_data', - 'params': { - 'params': params + "name": "sync_data_source", + "version": "v1", + 
"executionEngine": "BaseWorker", + "stages": [ + { + "locator": "SERVICE", + "name": "JobService", + "metadata": {}, + "method": "get_cost_data", + "params": {"params": params}, } - }] + ], } - _LOGGER.debug(f'[push_job_task] task: {params}') + _LOGGER.debug(f"[push_job_task] task: {params}") - queue.put('cost_analysis_q', utils.dump_json(task)) + queue.put("cost_analysis_q", utils.dump_json(task)) @staticmethod def change_in_progress_status(job_task_vo: JobTask): - _LOGGER.debug(f'[change_in_progress_status] start job task: {job_task_vo.job_task_id}') + _LOGGER.debug( + f"[change_in_progress_status] start job task: {job_task_vo.job_task_id}" + ) - return job_task_vo.update({ - 'status': 'IN_PROGRESS', - 'started_at': datetime.utcnow() - }) + return job_task_vo.update( + {"status": "IN_PROGRESS", "started_at": datetime.utcnow()} + ) @staticmethod def update_sync_status(job_task_vo: JobTask, created_count): - return job_task_vo.update({ - 'created_count': job_task_vo.created_count + created_count - }) + return job_task_vo.update( + {"created_count": job_task_vo.created_count + created_count} + ) def change_success_status(self, job_task_vo: JobTask, created_count): - _LOGGER.debug(f'[change_success_status] success job task: {job_task_vo.job_task_id} ' - f'(created_count = {created_count})') - - job_task_vo.update({ - 'status': 'SUCCESS', - 'created_count': created_count, - 'finished_at': datetime.utcnow() - }) + _LOGGER.debug( + f"[change_success_status] success job task: {job_task_vo.job_task_id} " + f"(created_count = {created_count})" + ) + + job_task_vo.update( + { + "status": "SUCCESS", + "created_count": created_count, + "finished_at": datetime.utcnow(), + } + ) job_vo = self.job_mgr.get_job(job_task_vo.job_id, job_task_vo.domain_id) self.job_mgr.decrease_remained_tasks(job_vo) def change_canceled_status(self, job_task_vo: JobTask): - _LOGGER.error(f'[change_canceled_status], job task canceled ({job_task_vo.job_task_id})') + _LOGGER.error( + 
f"[change_canceled_status], job task canceled ({job_task_vo.job_task_id})" + ) - job_task_vo.update({ - 'status': 'CANCELED', - 'finished_at': datetime.utcnow() - }) + job_task_vo.update({"status": "CANCELED", "finished_at": datetime.utcnow()}) job_vo = self.job_mgr.get_job(job_task_vo.job_id, job_task_vo.domain_id) self.job_mgr.decrease_remained_tasks(job_vo) @@ -107,17 +118,22 @@ def change_error_status(self, job_task_vo: JobTask, e, secret_type): if not isinstance(e, ERROR_BASE): e = ERROR_UNKNOWN(message=str(e)) - _LOGGER.error(f'[change_error_status], error job task ({job_task_vo.job_task_id}): {e.message}', exc_info=True) + _LOGGER.error( + f"[change_error_status], error job task ({job_task_vo.job_task_id}): {e.message}", + exc_info=True, + ) - job_task_vo.update({ - 'status': 'FAILURE', - 'error_code': e.error_code, - 'error_message': e.message, - 'finished_at': datetime.utcnow() - }) + job_task_vo.update( + { + "status": "FAILURE", + "error_code": e.error_code, + "error_message": e.message, + "finished_at": datetime.utcnow(), + } + ) job_vo = self.job_mgr.get_job(job_task_vo.job_id, job_task_vo.domain_id) self.job_mgr.decrease_remained_tasks(job_vo) - if secret_type != 'USE_SERVICE_ACCOUNT_SECRET': + if secret_type != "USE_SERVICE_ACCOUNT_SECRET": self.job_mgr.change_error_status(job_vo, ERROR_JOB_TASK()) diff --git a/src/spaceone/cost_analysis/model/budget_model.py b/src/spaceone/cost_analysis/model/budget_model.py index 9abaca7b..b2ce924c 100644 --- a/src/spaceone/cost_analysis/model/budget_model.py +++ b/src/spaceone/cost_analysis/model/budget_model.py @@ -9,7 +9,9 @@ class PlannedLimit(EmbeddedDocument): class ProviderFilter(EmbeddedDocument): - state = StringField(max_length=20, default='ENABLED', choices=('ENABLED', 'DISABLED')) + state = StringField( + max_length=20, default="ENABLED", choices=("ENABLED", "DISABLED") + ) providers = ListField(StringField(), default=[]) def to_dict(self): @@ -18,8 +20,10 @@ def to_dict(self): class 
Notification(EmbeddedDocument): threshold = FloatField(required=True) - unit = StringField(max_length=20, required=True, choices=('PERCENT', 'ACTUAL_COST')) - notification_type = StringField(max_length=20, required=True, choices=('CRITICAL', 'WARNING')) + unit = StringField(max_length=20, required=True, choices=("PERCENT", "ACTUAL_COST")) + notification_type = StringField( + max_length=20, required=True, choices=("CRITICAL", "WARNING") + ) notified_months = ListField(StringField(), default=[]) def to_dict(self): @@ -27,52 +31,53 @@ def to_dict(self): class Budget(MongoModel): - budget_id = StringField(max_length=40, generate_id='budget', unique=True) - name = StringField(max_length=255, default='') + budget_id = StringField(max_length=40, generate_id="budget", unique=True) + name = StringField(max_length=255, default="") limit = FloatField(required=True) planned_limits = ListField(EmbeddedDocumentField(PlannedLimit), default=[]) currency = StringField() provider_filter = EmbeddedDocumentField(ProviderFilter, required=True) - time_unit = StringField(max_length=20, choices=('TOTAL', 'MONTHLY')) + time_unit = StringField(max_length=20, choices=("TOTAL", "MONTHLY")) start = StringField(required=True, max_length=7) end = StringField(required=True, max_length=7) notifications = ListField(EmbeddedDocumentField(Notification), default=[]) tags = DictField(default={}) + resource_group = StringField(max_length=40, choices=("WORKSPACE", "PROJECT")) project_id = StringField(max_length=40, default=None, null=True) - project_group_id = StringField(max_length=40, default=None, null=True) data_source_id = StringField(max_length=40) + workspace_id = StringField(max_length=40) domain_id = StringField(max_length=40) created_at = DateTimeField(auto_now_add=True) updated_at = DateTimeField(auto_now=True) meta = { - 'updatable_fields': [ - 'name', - 'limit', - 'planned_limits', - 'provider_filter', - 'notifications', - 'tags' + "updatable_fields": [ + "name", + "limit", + 
"planned_limits", + "provider_filter", + "notifications", + "tags", ], - 'minimal_fields': [ - 'budget_id', - 'name', - 'limit', - 'provider_filter', - 'project_id', - 'project_group_id', - 'data_source_id' + "minimal_fields": [ + "budget_id", + "name", + "limit", + "provider_filter", + "project_id", + "project_group_id", + "data_source_id", ], - 'change_query_keys': { - 'user_projects': 'project_id', - 'user_project_groups': 'project_group_id' + "change_query_keys": { + "user_projects": "project_id", + "user_project_groups": "project_group_id", }, - 'ordering': ['name'], - 'indexes': [ - 'name', - 'project_id', - 'project_group_id', - 'data_source_id', - 'domain_id' - ] + "ordering": ["name"], + "indexes": [ + "name", + "project_id", + "project_group_id", + "data_source_id", + "domain_id", + ], } diff --git a/src/spaceone/cost_analysis/model/budget_usage_model.py b/src/spaceone/cost_analysis/model/budget_usage_model.py index bbed8569..b03dedad 100644 --- a/src/spaceone/cost_analysis/model/budget_usage_model.py +++ b/src/spaceone/cost_analysis/model/budget_usage_model.py @@ -4,7 +4,9 @@ class ProviderFilter(EmbeddedDocument): - state = StringField(max_length=20, default='ENABLED', choices=('ENABLED', 'DISABLED')) + state = StringField( + max_length=20, default="ENABLED", choices=("ENABLED", "DISABLED") + ) providers = ListField(StringField(), default=[]) def to_dict(self): @@ -13,44 +15,35 @@ def to_dict(self): class BudgetUsage(MongoModel): budget_id = StringField(max_length=40, required=True) - name = StringField(max_length=255, default='') + name = StringField(max_length=255, default="") date = StringField(max_length=7, required=True) cost = FloatField(required=True) limit = FloatField(required=True) currency = StringField(default=None, null=True) provider_filter = EmbeddedDocumentField(ProviderFilter, required=True) - budget = ReferenceField('Budget', reverse_delete_rule=CASCADE) + budget = ReferenceField("Budget", reverse_delete_rule=CASCADE) + resource_group 
= StringField(max_length=40, choices=["WORKSPACE", "PROJECT"]) project_id = StringField(max_length=40, default=None, null=True) - project_group_id = StringField(max_length=40, default=None, null=True) data_source_id = StringField(max_length=40) + workspace_id = StringField(max_length=40) domain_id = StringField(max_length=40) updated_at = DateTimeField(auto_now=True) meta = { - 'updatable_fields': [ - 'name', - 'cost', - 'limit' - ], - 'minimal_fields': [ - 'budget_id', - 'name', - 'date', - 'usd_cost', - 'limit' - ], - 'change_query_keys': { - 'user_projects': 'project_id', - 'user_project_groups': 'project_group_id' + "updatable_fields": ["name", "cost", "limit"], + "minimal_fields": ["budget_id", "name", "date", "cost", "limit"], + "change_query_keys": { + "user_projects": "project_id", + "user_project_groups": "project_group_id", }, - 'ordering': ['budget_id', 'date'], - 'indexes': [ - 'budget_id', - 'name', - 'date', - 'project_id', - 'project_group_id', - 'data_source_id', - 'domain_id' - ] + "ordering": ["budget_id", "date"], + "indexes": [ + "budget_id", + "name", + "date", + "project_id", + "project_group_id", + "data_source_id", + "domain_id", + ], } diff --git a/src/spaceone/cost_analysis/model/cost_model.py b/src/spaceone/cost_analysis/model/cost_model.py index 6bd1fd8d..7c8f0a65 100644 --- a/src/spaceone/cost_analysis/model/cost_model.py +++ b/src/spaceone/cost_analysis/model/cost_model.py @@ -4,7 +4,7 @@ class Cost(MongoModel): - cost_id = StringField(max_length=40, generate_id='cost') + cost_id = StringField(max_length=40, generate_id="cost") cost = FloatField(required=True) usage_quantity = FloatField(default=0) usage_unit = StringField(max_length=255, default=None, null=True) @@ -22,45 +22,56 @@ data_source_id = StringField(max_length=40) job_id = StringField(max_length=40, default=None, null=True) job_task_id = StringField(max_length=40, default=None, null=True) + workspace_id = StringField(max_length=40) domain_id = 
StringField(max_length=40) billed_year = StringField(max_length=4, required=True) billed_month = StringField(max_length=7, required=True) billed_date = StringField(max_length=10, required=True) meta = { - 'updatable_fields': [], - 'minimal_fields': [ - 'cost_id', - 'cost', - 'provider', - 'region_code', - 'product', - 'usage_type', - 'resource', - 'data_source_id', - 'billed_date' + "updatable_fields": [], + "minimal_fields": [ + "cost_id", + "cost", + "provider", + "region_code", + "product", + "usage_type", + "resource", + "data_source_id", + "billed_date", ], - 'change_query_keys': { - 'user_projects': 'project_id' - }, - 'indexes': [ + "change_query_keys": {"user_projects": "project_id"}, + "indexes": [ { - "fields": ['domain_id', 'data_source_id', 'job_id', 'job_task_id', '-billed_date'], - "name": "COMPOUND_INDEX_FOR_SYNC_JOB_1" + "fields": [ + "domain_id", + "data_source_id", + "job_id", + "job_task_id", + "-billed_date", + ], + "name": "COMPOUND_INDEX_FOR_SYNC_JOB_1", }, { - "fields": ['domain_id', 'data_source_id', 'job_id', '-billed_month'], - "name": "COMPOUND_INDEX_FOR_SYNC_JOB_2" + "fields": ["domain_id", "data_source_id", "job_id", "-billed_month"], + "name": "COMPOUND_INDEX_FOR_SYNC_JOB_2", }, { - "fields": ['domain_id', 'data_source_id', '-billed_date', 'project_id', 'cost'], - "name": "COMPOUND_INDEX_FOR_SEARCH" + "fields": [ + "domain_id", + "data_source_id", + "-billed_date", + "project_id", + "cost", + ], + "name": "COMPOUND_INDEX_FOR_SEARCH", }, { - "fields": ['domain_id', 'cost_id', 'project_id'], - "name": "COMPOUND_INDEX_FOR_DELETE" - } - ] + "fields": ["domain_id", "cost_id", "project_id"], + "name": "COMPOUND_INDEX_FOR_DELETE", + }, + ], } @@ -87,24 +98,34 @@ class MonthlyCost(MongoModel): billed_month = StringField(max_length=7, required=True) meta = { - 'updatable_fields': [], - 'change_query_keys': { - 'user_projects': 'project_id' - }, - 'indexes': [ + "updatable_fields": [], + "change_query_keys": {"user_projects": "project_id"}, + 
"indexes": [ { - "fields": ['domain_id', 'data_source_id', 'job_id', '-billed_month'], - "name": "COMPOUND_INDEX_FOR_SYNC_JOB" + "fields": ["domain_id", "data_source_id", "job_id", "-billed_month"], + "name": "COMPOUND_INDEX_FOR_SYNC_JOB", }, { - "fields": ['domain_id', 'data_source_id', '-billed_month', 'project_id', 'cost'], - "name": "COMPOUND_INDEX_FOR_SEARCH_1" + "fields": [ + "domain_id", + "data_source_id", + "-billed_month", + "project_id", + "cost", + ], + "name": "COMPOUND_INDEX_FOR_SEARCH_1", }, { - "fields": ['domain_id', 'data_source_id', '-billed_year', 'project_id', 'cost'], - "name": "COMPOUND_INDEX_FOR_SEARCH_2" + "fields": [ + "domain_id", + "data_source_id", + "-billed_year", + "project_id", + "cost", + ], + "name": "COMPOUND_INDEX_FOR_SEARCH_2", }, - ] + ], } @@ -116,13 +137,11 @@ class CostQueryHistory(MongoModel): updated_at = DateTimeField(auto_now=True) meta = { - 'updatable_fields': [ - 'updated_at' - ], - 'indexes': [ + "updatable_fields": ["updated_at"], + "indexes": [ { - "fields": ['domain_id', 'data_source_id', 'query_hash'], - "name": "COMPOUND_INDEX_FOR_SEARCH" + "fields": ["domain_id", "data_source_id", "query_hash"], + "name": "COMPOUND_INDEX_FOR_SEARCH", }, - ] + ], } diff --git a/src/spaceone/cost_analysis/model/data_source_model.py b/src/spaceone/cost_analysis/model/data_source_model.py index afa5990c..3cca2441 100644 --- a/src/spaceone/cost_analysis/model/data_source_model.py +++ b/src/spaceone/cost_analysis/model/data_source_model.py @@ -10,14 +10,18 @@ class PluginInfo(EmbeddedDocument): metadata = DictField(default={}) secret_id = StringField(max_length=40, default=None, null=True) schema = StringField(max_length=255, default=None, null=True) - upgrade_mode = StringField(max_length=255, choices=('AUTO', 'MANUAL'), default='AUTO') + upgrade_mode = StringField( + max_length=255, choices=("AUTO", "MANUAL"), default="AUTO" + ) def to_dict(self): return dict(self.to_mongo()) class SecretFilter(EmbeddedDocument): - state = 
StringField(max_length=20, default='ENABLED', choices=('ENABLED', 'DISABLED')) + state = StringField( + max_length=20, default="ENABLED", choices=("ENABLED", "DISABLED") + ) secrets = ListField(StringField(max_length=40), defualt=None, null=True) service_accounts = ListField(StringField(max_length=40), default=None, null=True) schemas = ListField(StringField(max_length=40), default=None, null=True) @@ -27,11 +31,17 @@ def to_dict(self): class DataSource(MongoModel): - data_source_id = StringField(max_length=40, generate_id='ds', unique=True) - name = StringField(max_length=255, unique_with='domain_id') - state = StringField(max_length=20, default='ENABLED', choices=('ENABLED', 'DISABLED')) - data_source_type = StringField(max_length=20, choices=('LOCAL', 'EXTERNAL')) - secret_type = StringField(max_length=32, default='MANUAL', choices=('MANUAL', 'USE_SERVICE_ACCOUNT_SECRET')) + data_source_id = StringField(max_length=40, generate_id="ds", unique=True) + name = StringField(max_length=255, unique_with="domain_id") + state = StringField( + max_length=20, default="ENABLED", choices=("ENABLED", "DISABLED") + ) + data_source_type = StringField(max_length=20, choices=("LOCAL", "EXTERNAL")) + secret_type = StringField( + max_length=32, + default="MANUAL", + choices=("MANUAL", "USE_SERVICE_ACCOUNT_SECRET"), + ) secret_filter = EmbeddedDocumentField(SecretFilter, default=None, null=True) provider = StringField(max_length=40, default=None, null=True) plugin_info = EmbeddedDocumentField(PluginInfo, default=None, null=True) @@ -40,39 +50,35 @@ class DataSource(MongoModel): cost_tag_keys = ListField(StringField()) cost_additional_info_keys = ListField(StringField()) cost_data_keys = ListField(StringField()) + resource_group = StringField( + max_length=255, default=None, null=True, choices=("DOMAIN", "WORKSPACE") + ) + workspace_id = StringField(max_length=40) domain_id = StringField(max_length=40) created_at = DateTimeField(auto_now_add=True) last_synchronized_at = 
DateTimeField(default=None, null=True) meta = { - 'updatable_fields': [ - 'name', - 'state', - 'plugin_info', - 'secret_filter', - 'template', - 'tags', - 'last_synchronized_at', - 'cost_tag_keys', - 'cost_additional_info_keys', - 'cost_data_keys' + "updatable_fields": [ + "name", + "state", + "plugin_info", + "secret_filter", + "template", + "tags", + "last_synchronized_at", + "cost_tag_keys", + "cost_additional_info_keys", + "cost_data_keys", ], - 'minimal_fields': [ - 'data_source_id', - 'name', - 'state', - 'data_source_type', - 'secret_type', - 'provider' + "minimal_fields": [ + "data_source_id", + "name", + "state", + "data_source_type", + "secret_type", + "provider", ], - 'ordering': [ - 'name' - ], - 'indexes': [ - 'name', - 'state', - 'data_source_type', - 'provider', - 'domain_id' - ] + "ordering": ["name"], + "indexes": ["name", "state", "data_source_type", "provider", "domain_id"], } diff --git a/src/spaceone/cost_analysis/model/data_source_rule_model.py b/src/spaceone/cost_analysis/model/data_source_rule_model.py index da6c779e..5c8955b6 100644 --- a/src/spaceone/cost_analysis/model/data_source_rule_model.py +++ b/src/spaceone/cost_analysis/model/data_source_rule_model.py @@ -6,7 +6,7 @@ class DataSourceRuleCondition(EmbeddedDocument): key = StringField(required=True) value = StringField(required=True) - operator = StringField(choices=('eq', 'contain', 'not', 'not_contain')) + operator = StringField(choices=("eq", "contain", "not", "not_contain")) class DataSourceRuleOptions(EmbeddedDocument): @@ -14,43 +14,49 @@ class DataSourceRuleOptions(EmbeddedDocument): class DataSourceRule(MongoModel): - data_source_rule_id = StringField(max_length=40, generate_id='rule', unique=True) - name = StringField(max_length=255, default='') + data_source_rule_id = StringField(max_length=40, generate_id="rule", unique=True) + name = StringField(max_length=255, default="") order = IntField(required=True) conditions = 
ListField(EmbeddedDocumentField(DataSourceRuleCondition), default=[]) - conditions_policy = StringField(max_length=20, choices=('ALL', 'ANY', 'ALWAYS')) + conditions_policy = StringField(max_length=20, choices=("ALL", "ANY", "ALWAYS")) actions = DictField(default={}) - rule_type = StringField(max_length=255, default='CUSTOM', choices=('MANAGED', 'CUSTOM')) - options = EmbeddedDocumentField(DataSourceRuleOptions, default=DataSourceRuleOptions) + rule_type = StringField( + max_length=255, default="CUSTOM", choices=("MANAGED", "CUSTOM") + ) + options = EmbeddedDocumentField( + DataSourceRuleOptions, default=DataSourceRuleOptions + ) tags = DictField(default={}) - data_source = ReferenceField('DataSource', reverse_delete_rule=CASCADE) + data_source = ReferenceField("DataSource", reverse_delete_rule=CASCADE) + resource_group = StringField(max_length=40, choices=["DOMAIN", "WORKSPACE"]) data_source_id = StringField(max_length=40) + workspace_id = StringField(max_length=40) domain_id = StringField(max_length=40) created_at = DateTimeField(auto_now_add=True) meta = { - 'updatable_fields': [ - 'name', - 'order', - 'conditions', - 'conditions_policy', - 'actions', - 'options', - 'tags' + "updatable_fields": [ + "name", + "order", + "conditions", + "conditions_policy", + "actions", + "options", + "tags", ], - 'minimal_fields': [ - 'data_source_rule_id', - 'name', - 'order', - 'rule_type', - 'data_source_id' + "minimal_fields": [ + "data_source_rule_id", + "name", + "order", + "rule_type", + "data_source_id", ], - 'ordering': ['order'], - 'indexes': [ + "ordering": ["order"], + "indexes": [ # 'data_source_rule_id', - 'order', - 'conditions_policy', - 'data_source_id', - 'domain_id' - ] + "order", + "conditions_policy", + "data_source_id", + "domain_id", + ], } diff --git a/src/spaceone/cost_analysis/model/job_model.py b/src/spaceone/cost_analysis/model/job_model.py index f379ee04..fa175a12 100644 --- a/src/spaceone/cost_analysis/model/job_model.py +++ 
b/src/spaceone/cost_analysis/model/job_model.py @@ -15,15 +15,20 @@ class Changed(EmbeddedDocument): class Job(MongoModel): - job_id = StringField(max_length=40, generate_id='job', unique=True) - status = StringField(max_length=20, default='IN_PROGRESS', choices=('IN_PROGRESS', 'SUCCESS', 'FAILURE', - 'TIMEOUT', 'CANCELED')) + job_id = StringField(max_length=40, generate_id="job", unique=True) + status = StringField( + max_length=20, + default="IN_PROGRESS", + choices=("IN_PROGRESS", "SUCCESS", "FAILURE", "TIMEOUT", "CANCELED"), + ) options = DictField() error_code = StringField(max_length=254, default=None, null=True) error_message = StringField(default=None, null=True) total_tasks = IntField(default=0) remained_tasks = IntField(default=0) + resource_group = StringField(max_length=40, choices=["DOMAIN", "WORKSPACE"]) data_source_id = StringField(max_length=40, required=True) + workspace_id = StringField(max_length=40, default=None, null=True) domain_id = StringField(max_length=40, required=True) changed = ListField(EmbeddedDocumentField(Changed), default=[]) created_at = DateTimeField(auto_now_add=True) @@ -31,30 +36,28 @@ class Job(MongoModel): finished_at = DateTimeField(default=None, null=True) meta = { - 'updatable_fields': [ - 'status', - 'error_code', - 'error_message', - 'total_tasks', - 'remained_tasks', - 'updated_at', - 'finished_at' + "updatable_fields": [ + "status", + "error_code", + "error_message", + "total_tasks", + "remained_tasks", + "updated_at", + "finished_at", ], - 'minimal_fields': [ - 'job_id', - 'status', - 'total_tasks', - 'remained_tasks', - 'data_source_id' + "minimal_fields": [ + "job_id", + "status", + "total_tasks", + "remained_tasks", + "data_source_id", ], - 'ordering': [ - '-created_at' - ], - 'indexes': [ + "ordering": ["-created_at"], + "indexes": [ # 'job_id', - 'status', - 'data_source_id', - 'domain_id', - 'created_at' - ] + "status", + "data_source_id", + "domain_id", + "created_at", + ], } diff --git 
a/src/spaceone/cost_analysis/model/job_task_model.py b/src/spaceone/cost_analysis/model/job_task_model.py index ad75f32c..e21a4086 100644 --- a/src/spaceone/cost_analysis/model/job_task_model.py +++ b/src/spaceone/cost_analysis/model/job_task_model.py @@ -4,15 +4,20 @@ class JobTask(MongoModel): - job_task_id = StringField(max_length=40, generate_id='job-task', unique=True) - status = StringField(max_length=20, default='PENDING', - choices=('PENDING', 'IN_PROGRESS', 'SUCCESS', 'FAILURE', 'CANCELED')) + job_task_id = StringField(max_length=40, generate_id="job-task", unique=True) + status = StringField( + max_length=20, + default="PENDING", + choices=("PENDING", "IN_PROGRESS", "SUCCESS", "FAILURE", "CANCELED"), + ) options = DictField() created_count = IntField(default=0) error_code = StringField(max_length=254, default=None, null=True) error_message = StringField(default=None, null=True) + resource_group = StringField(max_length=40, choices=["DOMAIN", "WORKSPACE"]) job_id = StringField(max_length=40, required=True) data_source_id = StringField(max_length=40, required=True) + workspace_id = StringField(max_length=40, default=None, null=True) domain_id = StringField(max_length=40, required=True) created_at = DateTimeField(auto_now_add=True) started_at = DateTimeField(default=None, null=True) @@ -20,31 +25,29 @@ finished_at = DateTimeField(default=None, null=True) meta = { - 'updatable_fields': [ - 'status', - 'created_count', - 'error_code', - 'error_message', - 'started_at', - 'updated_at', - 'finished_at' + "updatable_fields": [ + "status", + "created_count", + "error_code", + "error_message", + "started_at", + "updated_at", + "finished_at", ], - 'minimal_fields': [ - 'job_task_id', - 'status', - 'created_count', - 'job_id', - 'data_source_id' + "minimal_fields": [ + "job_task_id", + "status", + "created_count", + "job_id", + "data_source_id", ], - 'ordering': [ - '-created_at' - ], - 'indexes': [ + "ordering": ["-created_at"], + 
"indexes": [ # 'job_task_id', - 'status', - 'job_id', - 'data_source_id', - 'domain_id', - 'created_at' - ] + "status", + "job_id", + "data_source_id", + "domain_id", + "created_at", + ], } diff --git a/src/spaceone/cost_analysis/service/budget_service.py b/src/spaceone/cost_analysis/service/budget_service.py index 2e022e9f..e9ac20ba 100644 --- a/src/spaceone/cost_analysis/service/budget_service.py +++ b/src/spaceone/cost_analysis/service/budget_service.py @@ -19,13 +19,27 @@ @mutation_handler @event_handler class BudgetService(BaseService): + resource = "Budget" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.budget_mgr: BudgetManager = self.locator.get_manager('BudgetManager') - - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['data_source_id', 'time_unit', 'start', 'end', 'domain_id']) + self.budget_mgr: BudgetManager = self.locator.get_manager("BudgetManager") + + @transaction( + permission="cost-analysis:Budget.write", + role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required( + [ + "data_source_id", + "time_unit", + "start", + "end", + "resource_group", + "workspace_id", + "domain_id", + ] + ) def create(self, params): """Register budget @@ -33,8 +47,6 @@ def create(self, params): params (dict): { 'data_source_id': 'str', 'name': 'str', - 'project_id': 'str', - 'project_group_id': 'str', 'limit': 'float', 'planned_limits': 'list', 'time_unit': 'str', @@ -43,83 +55,98 @@ def create(self, params): 'provider_filter': 'dict', 'notifications': 'list', 'tags': 'dict', - 'domain_id': 'str' + 'resource_group': 'str', + 'project_id': 'str', + 'workspace_id': 'str', # injected from auth + 'domain_id': 'str' # injected from auth } Returns: budget_vo (object) """ - domain_id = params['domain_id'] - data_source_id = params['data_source_id'] - project_id = params.get('project_id') - project_group_id = params.get('project_group_id') - limit = params.get('limit') - planned_limits = 
params.get('planned_limits', []) - time_unit = params['time_unit'] - start = params['start'] - end = params['end'] - provider_filter = params.get('provider_filter', {}) - provider_filter_state = provider_filter.get('state', 'DISABLED') - notifications = params.get('notifications', []) - - self._check_target(project_id, project_group_id, domain_id) + domain_id = params["domain_id"] + workspace_id = params["workspace_id"] + data_source_id = params["data_source_id"] + project_id = params.get("project_id") + limit = params.get("limit") + planned_limits = params.get("planned_limits", []) + time_unit = params["time_unit"] + start = params["start"] + end = params["end"] + provider_filter = params.get("provider_filter", {}) + provider_filter_state = provider_filter.get("state", "DISABLED") + notifications = params.get("notifications", []) + resource_group = params["resource_group"] + # self._check_target(project_id, project_group_id, domain_id) self._check_time_period(start, end) + if resource_group == "PROJECT": + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") + identity_mgr.get_project(project_id) + # Check Provider Filter - if provider_filter_state == 'ENABLED': - if len(provider_filter.get('providers', [])) == 0: + if provider_filter_state == "ENABLED": + if len(provider_filter.get("providers", [])) == 0: raise ERROR_PROVIDER_FILTER_IS_EMPTY() else: - params['provider_filter'] = { - 'state': 'DISABLED', - 'providers': [] - } + params["provider_filter"] = {"state": "DISABLED", "providers": []} - data_source_mgr: DataSourceManager = self.locator.get_manager('DataSourceManager') - data_source_vo: DataSource = data_source_mgr.get_data_source(data_source_id, domain_id) + data_source_mgr: DataSourceManager = self.locator.get_manager( + "DataSourceManager" + ) + data_source_vo: DataSource = data_source_mgr.get_data_source( + data_source_id, domain_id + ) data_source_metadata = data_source_vo.plugin_info.metadata - params['currency'] = 
data_source_metadata.get('currency', 'USD') + params["currency"] = data_source_metadata.get("currency", "USD") - if time_unit == 'TOTAL': + if time_unit == "TOTAL": if limit is None: - raise ERROR_REQUIRED_PARAMETER(key='limit') + raise ERROR_REQUIRED_PARAMETER(key="limit") - params['planned_limits'] = None + params["planned_limits"] = None else: # Check Planned Limits self._check_planned_limits(start, end, time_unit, planned_limits) - params['limit'] = 0 + params["limit"] = 0 for planned_limit in planned_limits: - params['limit'] += planned_limit.get('limit', 0) + params["limit"] += planned_limit.get("limit", 0) # Check Notifications - self._check_notifications(notifications, project_id, project_group_id) + self._check_notifications(notifications, project_id) # Check Duplicated Budget budget_vos = self.budget_mgr.filter_budgets( data_source_id=data_source_id, project_id=project_id, - project_group_id=project_group_id, - domain_id=domain_id + workspace_id=workspace_id, + domain_id=domain_id, ) if budget_vos.count() > 0: - raise ERROR_BUDGET_ALREADY_EXIST(data_source_id=data_source_id, target=project_id or project_group_id) + raise ERROR_BUDGET_ALREADY_EXIST( + data_source_id=data_source_id, target=project_id + ) budget_vo = self.budget_mgr.create_budget(params) # Create budget usages - budget_usage_mgr: BudgetUsageManager = self.locator.get_manager('BudgetUsageManager') - budget_usage_mgr.create_budget_usages(budget_vo) + budget_usage_mgr: BudgetUsageManager = self.locator.get_manager( + "BudgetUsageManager" + ) + budget_usage_vo = budget_usage_mgr.create_budget_usages(budget_vo) budget_usage_mgr.update_cost_usage(budget_vo.budget_id, budget_vo.domain_id) budget_usage_mgr.notify_budget_usage(budget_vo) return budget_vo - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['budget_id', 'domain_id']) + @transaction( + permission="cost-analysis:Budget.write", + role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + 
@check_required(["budget_id", "workspace_id", "domain_id"]) # @change_date_value(['end']) def update(self, params): """Update budget @@ -131,6 +158,8 @@ 'limit': 'float', 'planned_limits': 'list', 'tags': 'dict' + 'project_id': 'str', + 'workspace_id': 'str', 'domain_id': 'str' } @@ -138,30 +167,42 @@ budget_vo (object) """ - budget_id = params['budget_id'] - domain_id = params['domain_id'] - planned_limits = params.get('planned_limits') + budget_id = params["budget_id"] + workspace_id = params["workspace_id"] + domain_id = params["domain_id"] + planned_limits = params.get("planned_limits") - budget_usage_mgr: BudgetUsageManager = self.locator.get_manager('BudgetUsageManager') + budget_usage_mgr: BudgetUsageManager = self.locator.get_manager( + "BudgetUsageManager" + ) - budget_vo: Budget = self.budget_mgr.get_budget(budget_id, domain_id) + budget_vo: Budget = self.budget_mgr.get_budget( + budget_id, workspace_id, domain_id + ) # Check limit and Planned Limits budget_vo = self.budget_mgr.update_budget_by_vo(params, budget_vo) - if 'name' in params: - budget_usage_vos = budget_usage_mgr.filter_budget_usages(budget_id=budget_id) + if "name" in params: + budget_usage_vos = budget_usage_mgr.filter_budget_usages( + budget_id=budget_id + ) for budget_usage_vo in budget_usage_vos: - budget_usage_mgr.update_budget_usage_by_vo({'name': params['name']}, budget_usage_vo) + budget_usage_mgr.update_budget_usage_by_vo( + {"name": params["name"]}, budget_usage_vo + ) budget_usage_mgr.update_cost_usage(budget_vo.budget_id, budget_vo.domain_id) budget_usage_mgr.notify_budget_usage(budget_vo) return budget_vo - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['budget_id', 'domain_id']) + @transaction( + permission="cost-analysis:Budget.write", + role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["budget_id", "workspace_id", "domain_id"]) def set_notification(self, params): 
"""Set budget notification @@ -169,32 +210,40 @@ def set_notification(self, params): params (dict): { 'budget_id': 'str', 'notifications': 'list', + 'workspace_id': 'str', 'domain_id': 'str' } Returns: budget_vo (object) """ - budget_id = params['budget_id'] - domain_id = params['domain_id'] - notifications = params.get('notifications', []) + budget_id = params["budget_id"] + workspace_id = params["workspace_id"] + domain_id = params["domain_id"] + notifications = params.get("notifications", []) - budget_vo: Budget = self.budget_mgr.get_budget(budget_id, domain_id) + budget_vo: Budget = self.budget_mgr.get_budget( + budget_id, workspace_id, domain_id + ) # Check Notifications - self._check_notifications(notifications, budget_vo.project_id, budget_vo.project_group_id) - params['notifications'] = notifications + self._check_notifications(notifications, budget_vo.project_id) + params["notifications"] = notifications return self.budget_mgr.update_budget_by_vo(params, budget_vo) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['budget_id', 'domain_id']) + @transaction( + permission="cost-analysis:Budget.write", + role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["budget_id", "workspace_id", "domain_id"]) def delete(self, params): """Deregister budget Args: params (dict): { 'budget_id': 'str', + 'workspace_id': 'str', 'domain_id': 'str' } @@ -202,47 +251,67 @@ def delete(self, params): None """ - self.budget_mgr.delete_budget(params['budget_id'], params['domain_id']) + self.budget_mgr.delete_budget( + params["budget_id"], params["workspace_id"], params["domain_id"] + ) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['budget_id', 'domain_id']) + @transaction( + permission="cost-analysis:Budget.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["budget_id", "domain_id"]) def 
get(self, params): - """ Get budget + """Get budget Args: params (dict): { 'budget_id': 'str', + 'workspace_id': 'str', 'domain_id': 'str', - 'only': 'list } Returns: budget_vo (object) """ - budget_id = params['budget_id'] - domain_id = params['domain_id'] - - return self.budget_mgr.get_budget(budget_id, domain_id, params.get('only')) - - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['domain_id']) - @append_query_filter(['budget_id', 'name', 'project_id', 'project_group_id', 'data_source_id', 'domain_id']) - @append_keyword_filter(['budget_id', 'name']) + budget_id = params["budget_id"] + domain_id = params["domain_id"] + workspace_id = params.get("workspace_id") + + return self.budget_mgr.get_budget(budget_id, domain_id, workspace_id) + + @transaction( + permission="cost-analysis:Budget.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["domain_id"]) + @append_query_filter( + [ + "budget_id", + "name", + "time_unit", + "data_source_id", + "project_id", + "workspace_id", + "domain_id", + ] + ) + @append_keyword_filter(["budget_id", "name"]) def list(self, params): - """ List budgets + """List budgets Args: params (dict): { + 'query': 'dict (spaceone.api.core.v1.Query)', 'budget_id': 'str', 'name': 'str', - 'project_id': 'str', + 'time_unit': 'str', 'project_group_id': 'str', - 'data_source_id': 'str', + 'project_id': 'str', + 'workspace_id': 'str', 'domain_id': 'str', - 'query': 'dict (spaceone.api.core.v1.Query)', - 'user_projects': 'list', // from meta, - 'user_project_groups': 'list', // from meta + 'data_source_id': 'str', } Returns: @@ -253,10 +322,13 @@ def list(self, params): query = self._set_user_project_or_project_group_filter(params) return self.budget_mgr.list_budgets(query) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['query', 'domain_id']) - @append_query_filter(['domain_id']) - @append_keyword_filter(['budget_id', 'name']) + 
@transaction( + permission="cost-analysis:Budget.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["query", "domain_id"]) + @append_query_filter(["domain_id"]) + @append_keyword_filter(["budget_id", "name"]) def stat(self, params): """ Args: @@ -277,12 +349,12 @@ def stat(self, params): def _check_target(self, project_id, project_group_id, domain_id): if project_id is None and project_group_id is None: - raise ERROR_REQUIRED_PARAMETER(key='project_id or project_group_id') + raise ERROR_REQUIRED_PARAMETER(key="project_id or project_group_id") if project_id and project_group_id: raise ERROR_ONLY_ONF_OF_PROJECT_OR_PROJECT_GROUP() - identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager') + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") if project_id: identity_mgr.get_project(project_id, domain_id) @@ -296,17 +368,17 @@ def _check_time_period(start, end): def _check_planned_limits(self, start, end, time_unit, planned_limits): planned_limits_dict = self._convert_planned_limits_data_type(planned_limits) - date_format = '%Y-%m' + date_format = "%Y-%m" try: start_dt = datetime.strptime(start, date_format) except Exception as e: - raise ERROR_INVALID_PARAMETER_TYPE(key='start', type=date_format) + raise ERROR_INVALID_PARAMETER_TYPE(key="start", type=date_format) try: end_dt = datetime.strptime(end, date_format) except Exception as e: - raise ERROR_INVALID_PARAMETER_TYPE(key='end', type=date_format) + raise ERROR_INVALID_PARAMETER_TYPE(key="end", type=date_format) for dt in rrule(MONTHLY, dtstart=start_dt, until=end_dt): date_str = dt.strftime(date_format) @@ -323,8 +395,8 @@ def _convert_planned_limits_data_type(planned_limits): planned_limits_dict = {} for planned_limit in planned_limits: - date = planned_limit.get('date') - limit = planned_limit.get('limit', 0) + date = planned_limit.get("date") + limit = planned_limit.get("limit", 0) if date is None: raise 
ERROR_DATE_IS_REQUIRED(value=planned_limit) @@ -336,39 +408,43 @@ def _convert_planned_limits_data_type(planned_limits): return planned_limits_dict @staticmethod - def _check_notifications(notifications, project_id, project_group_id): - if len(notifications) > 0 and project_group_id and project_id is None: - raise ERROR_NOTIFICATION_IS_NOT_SUPPORTED_IN_PROJECT_GROUP(target=project_group_id) + def _check_notifications(notifications, project_id): + if len(notifications) > 0 and project_id is None: + raise ERROR_NOTIFICATION_IS_NOT_SUPPORTED_IN_PROJECT(target=project_id) for notification in notifications: - unit = notification.get('unit') - notification_type = notification.get('notification_type') - threshold = notification.get('threshold', 0) + unit = notification.get("unit") + notification_type = notification.get("notification_type") + threshold = notification.get("threshold", 0) - if unit not in ['PERCENT', 'ACTUAL_COST']: + if unit not in ["PERCENT", "ACTUAL_COST"]: raise ERROR_UNIT_IS_REQUIRED(value=notification) - if notification_type not in ['CRITICAL', 'WARNING']: + if notification_type not in ["CRITICAL", "WARNING"]: raise ERROR_NOTIFICATION_TYPE_IS_REQUIRED(value=notification) if threshold < 0: raise ERROR_THRESHOLD_IS_WRONG(value=notification) - if unit == 'PERCENT': + if unit == "PERCENT": if threshold > 100: raise ERROR_THRESHOLD_IS_WRONG_IN_PERCENT_TYPE(value=notification) @staticmethod def _set_user_project_or_project_group_filter(params): - query = params.get('query', {}) - query['filter'] = query.get('filter', []) - - if 'user_projects' in params: - user_projects = params['user_projects'] + [None] - query['filter'].append({'k': 'user_projects', 'v': user_projects, 'o': 'in'}) - - if 'user_project_groups' in params: - user_project_groups = params['user_project_groups'] + [None] - query['filter'].append({'k': 'user_project_groups', 'v': user_project_groups, 'o': 'in'}) + query = params.get("query", {}) + query["filter"] = query.get("filter", []) + + if 
"user_projects" in params: + user_projects = params["user_projects"] + [None] + query["filter"].append( + {"k": "user_projects", "v": user_projects, "o": "in"} + ) + + if "user_project_groups" in params: + user_project_groups = params["user_project_groups"] + [None] + query["filter"].append( + {"k": "user_project_groups", "v": user_project_groups, "o": "in"} + ) return query diff --git a/src/spaceone/cost_analysis/service/budget_usage_service.py b/src/spaceone/cost_analysis/service/budget_usage_service.py index 614cee5b..bdc76446 100644 --- a/src/spaceone/cost_analysis/service/budget_usage_service.py +++ b/src/spaceone/cost_analysis/service/budget_usage_service.py @@ -12,29 +12,43 @@ @mutation_handler @event_handler class BudgetUsageService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.budget_usage_mgr: BudgetUsageManager = self.locator.get_manager('BudgetUsageManager') - - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['domain_id']) - @append_query_filter(['budget_id', 'data_source_id', 'name', 'date', 'domain_id']) - @append_keyword_filter(['budget_id', 'name']) + self.budget_usage_mgr: BudgetUsageManager = self.locator.get_manager( + "BudgetUsageManager" + ) + + @transaction( + permission="cost-analysis:BudgetUsage.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["domain_id"]) + @append_query_filter( + [ + "budget_id", + "data_source_id", + "name", + "date", + "project_id", + "workspace_id", + "domain_id", + ] + ) + @append_keyword_filter(["budget_id", "name"]) @set_query_page_limit(1000) def list(self, params): - """ List budget_usages + """List budget_usages Args: params (dict): { + 'query': 'dict (spaceone.api.core.v1.Query)', 'budget_id': 'str', 'data_source_id': 'str', 'name': 'str', 'date': 'str', + 'project_id': 'str', + 'workspace_id': str, 'domain_id': 'str', - 'query': 'dict (spaceone.api.core.v1.Query)', - 
'user_projects': 'list', // from meta, - 'user_project_groups': 'list', // from meta } Returns: @@ -45,20 +59,22 @@ def list(self, params): query = self._set_user_project_or_project_group_filter(params) return self.budget_usage_mgr.list_budget_usages(query) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['query', 'domain_id']) - @append_query_filter(['data_source_id', 'domain_id']) - @append_keyword_filter(['budget_id', 'name']) + @transaction( + permission="cost-analysis:BudgetUsage.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["query", "domain_id"]) + @append_query_filter(["budget_id", "data_source_id", "domain_id"]) + @append_keyword_filter(["budget_id", "name"]) @set_query_page_limit(1000) def stat(self, params): """ Args: params (dict): { - 'data_source_id': 'str', - 'domain_id': 'str', 'query': 'dict (spaceone.api.core.v1.StatisticsQuery)', - 'user_projects': 'list', // from meta, - 'user_project_groups': 'list' // from meta + "budget_id": "str", + 'data_source_id': 'str', + 'domain_id': 'str' } Returns: @@ -69,19 +85,20 @@ def stat(self, params): query = self._set_user_project_or_project_group_filter(params) return self.budget_usage_mgr.stat_budget_usages(query) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['query', 'query.fields', 'domain_id']) - @append_query_filter(['data_source_id', 'domain_id']) - @append_keyword_filter(['budget_id', 'name']) + @transaction( + permission="cost-analysis:BudgetUsage.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["query", "query.fields", "domain_id"]) + @append_query_filter(["data_source_id", "domain_id"]) + @append_keyword_filter(["budget_id", "name"]) @set_query_page_limit(1000) def analyze(self, params): """ Args: params (dict): { 'query': 'dict (spaceone.api.core.v1.TimeSeriesAnalyzeQuery)', - 'domain_id': 'str', - 'user_projects': 'list', // 
from meta - 'user_project_groups': 'list' // from meta + 'domain_id': 'str' } Returns: @@ -90,27 +107,32 @@ def analyze(self, params): """ query = self._set_user_project_or_project_group_filter(params) - self._check_granularity(query.get('granularity')) + self._check_granularity(query.get("granularity")) return self.budget_usage_mgr.analyze_budget_usages(query) @staticmethod def _check_granularity(granularity): - if granularity and granularity != 'MONTHLY': - raise ERROR_INVALID_PARAMETER(key='query.granularity', - reason='Granularity is only MONTHLY.') + if granularity and granularity != "MONTHLY": + raise ERROR_INVALID_PARAMETER( + key="query.granularity", reason="Granularity is only MONTHLY." + ) @staticmethod def _set_user_project_or_project_group_filter(params): - query = params.get('query', {}) - query['filter'] = query.get('filter', []) - - if 'user_projects' in params: - user_projects = params['user_projects'] + [None] - query['filter'].append({'k': 'user_projects', 'v': user_projects, 'o': 'in'}) - - if 'user_project_groups' in params: - user_project_groups = params['user_project_groups'] + [None] - query['filter'].append({'k': 'user_project_groups', 'v': user_project_groups, 'o': 'in'}) + query = params.get("query", {}) + query["filter"] = query.get("filter", []) + + if "user_projects" in params: + user_projects = params["user_projects"] + [None] + query["filter"].append( + {"k": "user_projects", "v": user_projects, "o": "in"} + ) + + if "user_project_groups" in params: + user_project_groups = params["user_project_groups"] + [None] + query["filter"].append( + {"k": "user_project_groups", "v": user_project_groups, "o": "in"} + ) return query diff --git a/src/spaceone/cost_analysis/service/cost_query_set_service.py b/src/spaceone/cost_analysis/service/cost_query_set_service.py index 3f5504e9..7abc04bc 100644 --- a/src/spaceone/cost_analysis/service/cost_query_set_service.py +++ b/src/spaceone/cost_analysis/service/cost_query_set_service.py @@ -12,14 +12,18 
@@ @mutation_handler @event_handler class CostQuerySetService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.cost_query_set_mgr: CostQuerySetManager = self.locator.get_manager('CostQuerySetManager') - - @transaction(append_meta={'authorization.scope': 'USER'}) - @check_required(['data_source_id', 'name', 'options', 'domain_id']) - @change_date_value(['start', 'end']) + self.cost_query_set_mgr: CostQuerySetManager = self.locator.get_manager( + "CostQuerySetManager" + ) + + @transaction( + permission="cost-analysis:CostQuerySet.write", + role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["data_source_id", "name", "options", "user_id", "domain_id"]) + @change_date_value(["start", "end"]) def create(self, params): """Register cost_query_set @@ -29,6 +33,7 @@ def create(self, params): 'name': 'str', 'options': 'str', 'tags': 'dict', + 'user_id': 'str', 'domain_id': 'str' } @@ -36,13 +41,14 @@ def create(self, params): cost_query_set_vo (object) """ - params['user_id'] = self.transaction.get_meta('user_id') - return self.cost_query_set_mgr.create_cost_query_set(params) - @transaction(append_meta={'authorization.scope': 'USER'}) - @check_required(['cost_query_set_id', 'domain_id']) - @change_date_value(['end']) + @transaction( + permission="cost-analysis:CostQuerySet.write", + role_types=["USER"], + ) + @check_required(["cost_query_set_id", "user_id", "domain_id"]) + @change_date_value(["end"]) def update(self, params): """Update cost_query_set @@ -52,63 +58,85 @@ def update(self, params): 'name': 'str', 'options': 'dict', 'tags': 'dict' + 'user_id': 'str', 'domain_id': 'str' } Returns: cost_query_set_vo (object) """ - cost_query_set_id = params['cost_query_set_id'] - domain_id = params['domain_id'] - - cost_query_set_vo: CostQuerySet = self.cost_query_set_mgr.get_cost_query_set(cost_query_set_id, domain_id) - - return self.cost_query_set_mgr.update_cost_query_set_by_vo(params, cost_query_set_vo) - - 
@transaction(append_meta={'authorization.scope': 'USER'}) - @check_required(['cost_query_set_id', 'domain_id']) + cost_query_set_id = params["cost_query_set_id"] + user_id = params["user_id"] + domain_id = params["domain_id"] + + cost_query_set_vo: CostQuerySet = self.cost_query_set_mgr.get_cost_query_set( + cost_query_set_id, user_id, domain_id + ) + + return self.cost_query_set_mgr.update_cost_query_set_by_vo( + params, cost_query_set_vo + ) + + @transaction( + permission="cost-analysis:CostQuerySet.write", + role_types=["USER"], + ) + @check_required(["cost_query_set_id", "domain_id"]) def delete(self, params): """Deregister cost_query_set Args: params (dict): { 'cost_query_set_id': 'str', - 'domain_id': 'str' + 'domain_id': 'str' # injected from auth } Returns: None """ - self.cost_query_set_mgr.delete_cost_query_set(params['cost_query_set_id'], params['domain_id']) + self.cost_query_set_mgr.delete_cost_query_set( + params["cost_query_set_id"], params["domain_id"] + ) - @transaction(append_meta={'authorization.scope': 'USER'}) - @check_required(['cost_query_set_id', 'domain_id']) + @transaction( + permission="cost-analysis:CostQuerySet.read", + role_types=["USER"], + ) + @check_required(["cost_query_set_id", "user_id", "domain_id"]) def get(self, params): - """ Get cost_query_set + """Get cost_query_set Args: params (dict): { 'cost_query_set_id': 'str', - 'domain_id': 'str', - 'only': 'list + 'user_id': 'str', # injected from auth + 'domain_id': 'str' # injected from auth } Returns: cost_query_set_vo (object) """ - cost_query_set_id = params['cost_query_set_id'] - domain_id = params['domain_id'] - - return self.cost_query_set_mgr.get_cost_query_set(cost_query_set_id, domain_id, params.get('only')) - - @transaction(append_meta={'authorization.scope': 'USER'}) - @check_required(['domain_id']) - @append_query_filter(['data_source_id', 'cost_query_set_id', 'name', 'user_id', 'domain_id']) - @append_keyword_filter(['cost_query_set_id', 'name']) + cost_query_set_id = 
params["cost_query_set_id"] + user_id = params["user_id"] + domain_id = params["domain_id"] + + return self.cost_query_set_mgr.get_cost_query_set( + cost_query_set_id, user_id, domain_id + ) + + @transaction( + permission="cost-analysis:CostQuerySet.read", + role_types=["USER"], + ) + @check_required(["data_source_id", "domain_id"]) + @append_query_filter( + ["data_source_id", "cost_query_set_id", "name", "user_id", "domain_id"] + ) + @append_keyword_filter(["cost_query_set_id", "name"]) def list(self, params): - """ List cost_query_sets + """List cost_query_sets Args: params (dict): { @@ -125,13 +153,16 @@ def list(self, params): total_count """ - query = params.get('query', {}) + query = params.get("query", {}) return self.cost_query_set_mgr.list_cost_query_sets(query) - @transaction(append_meta={'authorization.scope': 'USER'}) - @check_required(['query', 'data_source_id', 'domain_id']) - @append_query_filter(['data_source_id', 'domain_id']) - @append_keyword_filter(['cost_query_set_id', 'name']) + @transaction( + permission="cost-analysis:CostQuerySet.read", + role_types=["USER"], + ) + @check_required(["query", "data_source_id", "domain_id"]) + @append_query_filter(["data_source_id", "domain_id"]) + @append_keyword_filter(["cost_query_set_id", "name"]) def stat(self, params): """ Args: @@ -145,5 +176,5 @@ def stat(self, params): """ - query = params.get('query', {}) + query = params.get("query", {}) return self.cost_query_set_mgr.stat_cost_query_sets(query) diff --git a/src/spaceone/cost_analysis/service/cost_service.py b/src/spaceone/cost_analysis/service/cost_service.py index 0ad5d9ed..9d8c66a1 100644 --- a/src/spaceone/cost_analysis/service/cost_service.py +++ b/src/spaceone/cost_analysis/service/cost_service.py @@ -15,13 +15,14 @@ @mutation_handler @event_handler class CostService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.cost_mgr: CostManager = self.locator.get_manager('CostManager') + self.cost_mgr: 
CostManager = self.locator.get_manager("CostManager") - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['cost', 'data_source_id', 'billed_date', 'domain_id']) + @transaction(permission="cost-analysis:Cost.write", role_types=["WORKSPACE_OWNER"]) + @check_required( + ["cost", "data_source_id", "billed_date", "project_id", "domain_id"] + ) def create(self, params): """Register cost @@ -49,15 +50,17 @@ """ # validation check (service_account_id / project_id / data_source_id) + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") + identity_mgr.get_project(params["project_id"]) cost_vo: Cost = self.cost_mgr.create_cost(params) - self.cost_mgr.remove_stat_cache(params['domain_id'], params['data_source_id']) + self.cost_mgr.remove_stat_cache(params["domain_id"], params["data_source_id"]) return cost_vo - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['cost_id', 'domain_id']) + @transaction(permission="cost-analysis:Cost.write", role_types=["WORKSPACE_OWNER"]) + @check_required(["cost_id", "domain_id"]) def delete(self, params): """Deregister cost @@ -71,44 +74,67 @@ None """ - domain_id = params['domain_id'] + domain_id = params["domain_id"] - cost_vo: Cost = self.cost_mgr.get_cost(params['cost_id'], params['domain_id']) + cost_vo: Cost = self.cost_mgr.get_cost(params["cost_id"], params["domain_id"]) self.cost_mgr.remove_stat_cache(domain_id, cost_vo.data_source_id) self.cost_mgr.delete_cost_by_vo(cost_vo) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['cost_id', 'domain_id']) + @transaction( + permission="cost-analysis:Cost.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["cost_id", "domain_id"]) def get(self, params): - """ Get cost + """Get cost Args: params (dict): { 'cost_id': 'str', + 
'workspace_id': 'str', 'domain_id': 'str', - 'only': 'list } Returns: cost_vo (object) """ - cost_id = params['cost_id'] - domain_id = params['domain_id'] - - return self.cost_mgr.get_cost(cost_id, domain_id, params.get('only')) - - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['data_source_id', 'domain_id']) - @append_query_filter(['cost_id', 'provider', 'region_code', 'region_key', 'product', 'usage_type', 'resource', - 'service_account_id', 'project_id', 'project_group_id', 'data_source_id', 'domain_id', - 'user_projects']) - @append_keyword_filter(['cost_id']) + cost_id = params["cost_id"] + workspace_id = params.get("workspace_id") + domain_id = params["domain_id"] + + return self.cost_mgr.get_cost(cost_id, domain_id, workspace_id) + + @transaction( + permission="cost-analysis:Cost.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["data_source_id", "domain_id"]) + @append_query_filter( + [ + "cost_id", + "provider", + "region_code", + "region_key", + "product", + "usage_type", + "resource", + "service_account_id", + "project_id", + "project_group_id", + "data_source_id", + "domain_id", + "user_projects", + ] + ) + @append_keyword_filter(["cost_id"]) @set_query_page_limit(1000) def list(self, params): - """ List costs + """List costs Args: params (dict): { @@ -133,14 +159,27 @@ total_count """ - query = params.get('query', {}) + query = params.get("query", {}) return self.cost_mgr.list_costs(query) - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['query', 'query.granularity', 'query.start', 'query.end', 'query.fields', 'data_source_id', - 'domain_id']) - @append_query_filter(['data_source_id', 'domain_id', 'user_projects']) - @append_keyword_filter(['cost_id']) + @transaction( + permission="cost-analysis:Cost.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", 
"WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required( + [ + "query", + "query.granularity", + "query.start", + "query.end", + "query.fields", + "data_source_id", + "domain_id", + ] + ) + @append_query_filter(["data_source_id", "domain_id", "user_projects"]) + @append_keyword_filter(["cost_id"]) @set_query_page_limit(1000) def analyze(self, params): """ @@ -157,16 +196,22 @@ """ - domain_id = params['domain_id'] - data_source_id = params['data_source_id'] - query = params.get('query', {}) - - return self.cost_mgr.analyze_costs_by_granularity(query, domain_id, data_source_id) - - @transaction(append_meta={'authorization.scope': 'PROJECT'}) - @check_required(['query', 'domain_id']) - @append_query_filter(['data_source_id', 'domain_id', 'user_projects']) - @append_keyword_filter(['cost_id']) + domain_id = params["domain_id"] + data_source_id = params["data_source_id"] + query = params.get("query", {}) + + return self.cost_mgr.analyze_costs_by_granularity( + query, domain_id, data_source_id + ) + + @transaction( + permission="cost-analysis:Cost.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["query", "domain_id"]) + @append_query_filter(["data_source_id", "domain_id", "user_projects"]) + @append_keyword_filter(["cost_id"]) @set_query_page_limit(1000) def stat(self, params): """ @@ -183,17 +228,21 @@ """ - domain_id = params['domain_id'] - query = params.get('query', {}) + domain_id = params["domain_id"] + query = params.get("query", {}) if self._is_distinct_query(query): page, query = self._get_page_from_query(query) search, query = self._get_search_value_from_query(query) query_hash = utils.dict_to_hash(query) - self.cost_mgr.create_cost_query_history(query, query_hash, domain_id, 'global') + self.cost_mgr.create_cost_query_history( + query, query_hash, domain_id, 
"global" + ) - response = self.cost_mgr.stat_monthly_costs_with_cache(query, query_hash, domain_id, 'global') + response = self.cost_mgr.stat_monthly_costs_with_cache( + query, query_hash, domain_id, "global" + ) if search: response = self._search_results(response, search) @@ -203,20 +252,20 @@ def stat(self, params): return response else: - raise ERROR_NOT_SUPPORT_QUERY_OPTION(query_option='aggregate') + raise ERROR_NOT_SUPPORT_QUERY_OPTION(query_option="aggregate") @staticmethod def _is_distinct_query(query): - if 'distinct' in query: + if "distinct" in query: return True else: return False @staticmethod def _get_page_from_query(query): - if 'page' in query: - page = query['page'] - del query['page'] + if "page" in query: + page = query["page"] + del query["page"] else: page = None @@ -224,21 +273,21 @@ def _get_page_from_query(query): @staticmethod def _get_search_value_from_query(query): - distinct = query['distinct'] + distinct = query["distinct"] search = None changed_filter = [] - for condition in query.get('filter', []): - key = condition.get('key', condition.get('k')) - value = condition.get('value', condition.get('v')) - operator = condition.get('operator', condition.get('o')) + for condition in query.get("filter", []): + key = condition.get("key", condition.get("k")) + value = condition.get("value", condition.get("v")) + operator = condition.get("operator", condition.get("o")) - if key == distinct and operator == 'contain': + if key == distinct and operator == "contain": search = value else: changed_filter.append(condition) - query['filter'] = changed_filter + query["filter"] = changed_filter return search, query @@ -247,28 +296,26 @@ def _search_results(response, search): search = search.lower() changed_results = [] - for result in response.get('results', []): + for result in response.get("results", []): if search in result.lower(): changed_results.append(result) return { - 'results': changed_results, + "results": changed_results, } @staticmethod def 
_page_results(response, page): - results = response.get('results', []) - response = { - 'total_count': len(results) - } + results = response.get("results", []) + response = {"total_count": len(results)} - if 'limit' in page and page['limit'] > 0: - start = page.get('start', 1) + if "limit" in page and page["limit"] > 0: + start = page.get("start", 1) if start < 1: start = 1 - response['results'] = results[start - 1:start + page['limit'] - 1] + response["results"] = results[start - 1 : start + page["limit"] - 1] else: - response['results'] = results + response["results"] = results return response diff --git a/src/spaceone/cost_analysis/service/data_source_rule_service.py b/src/spaceone/cost_analysis/service/data_source_rule_service.py index 84711050..74764ca1 100644 --- a/src/spaceone/cost_analysis/service/data_source_rule_service.py +++ b/src/spaceone/cost_analysis/service/data_source_rule_service.py @@ -3,16 +3,27 @@ from spaceone.core.service import * from spaceone.cost_analysis.error import * -from spaceone.cost_analysis.manager.data_source_rule_manager import DataSourceRuleManager +from spaceone.cost_analysis.manager.data_source_rule_manager import ( + DataSourceRuleManager, +) from spaceone.cost_analysis.manager.data_source_manager import DataSourceManager from spaceone.cost_analysis.manager.identity_manager import IdentityManager from spaceone.cost_analysis.model.data_source_rule_model import DataSourceRule _LOGGER = logging.getLogger(__name__) -_SUPPORTED_CONDITION_KEYS = ['provider', 'region_code', 'product', 'account', 'usage_type', 'resource_group', - 'resource', 'tags.', 'additional_info.'] -_SUPPORTED_CONDITION_OPERATORS = ['eq', 'contain', 'not', 'not_contain'] +_SUPPORTED_CONDITION_KEYS = [ + "provider", + "region_code", + "product", + "account", + "usage_type", + "resource_group", + "resource", + "tags.", + "additional_info.", +] +_SUPPORTED_CONDITION_OPERATORS = ["eq", "contain", "not", "not_contain"] @authentication_handler @@ -20,12 +31,16 @@ 
@mutation_handler @event_handler class DataSourceRuleService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.data_source_rule_mgr: DataSourceRuleManager = self.locator.get_manager('DataSourceRuleManager') - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) + self.data_source_rule_mgr: DataSourceRuleManager = self.locator.get_manager( + "DataSourceRuleManager" + ) + + @transaction( + permission="cost-analysis:DataSourceRule.write", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER"], + ) def create(self, params): """Create data source rule @@ -38,6 +53,8 @@ def create(self, params): 'actions': 'dict', 'options': 'dict', 'tags': 'dict', + 'resource_group: 'str', + 'workspace_id': 'str', 'domain_id': 'str' } @@ -46,37 +63,59 @@ def create(self, params): """ return self.create_data_source_rule(params) - @check_required(['data_source_id', 'conditions_policy', 'actions', 'domain_id']) - @change_date_value(['start', 'end']) + @check_required( + [ + "data_source_id", + "conditions_policy", + "actions", + "resource_group", + "domain_id", + ] + ) + @change_date_value(["start", "end"]) def create_data_source_rule(self, params): - domain_id = params['domain_id'] - data_source_id = params['data_source_id'] - conditions = params.get('conditions', []) - conditions_policy = params['conditions_policy'] - actions = params['actions'] - rule_type = params.get('rule_type', 'CUSTOM') - - if conditions_policy == 'ALWAYS': - params['conditions'] = [] + domain_id = params["domain_id"] + data_source_id = params["data_source_id"] + conditions = params.get("conditions", []) + conditions_policy = params["conditions_policy"] + actions = params["actions"] + rule_type = params.get("rule_type", "CUSTOM") + + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") + + if params["resource_group"] == "WORKSPACE": + identity_mgr.get_workspace(params["workspace_id"]) + else: + params["workspace_id"] = "*" + + if 
conditions_policy == "ALWAYS": + params["conditions"] = [] else: if len(conditions) == 0: - raise ERROR_REQUIRED_PARAMETER(key='conditions') + raise ERROR_REQUIRED_PARAMETER(key="conditions") else: self._check_conditions(conditions) self._check_actions(actions, domain_id) - data_source_mgr: DataSourceManager = self.locator.get_manager('DataSourceManager') + data_source_mgr: DataSourceManager = self.locator.get_manager( + "DataSourceManager" + ) data_source_vo = data_source_mgr.get_data_source(data_source_id, domain_id) - params['data_source'] = data_source_vo - params['order'] = self._get_highest_order(data_source_id, rule_type, domain_id) + 1 + params["data_source"] = data_source_vo + params["order"] = ( + self._get_highest_order(data_source_id, rule_type, domain_id) + 1 + ) return self.data_source_rule_mgr.create_data_source_rule(params) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_rule_id', 'domain_id']) - @change_date_value(['end']) + @transaction( + permission="cost-analysis:DataSourceRule.write", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_rule_id", "domain_id"]) + @change_date_value(["end"]) def update(self, params): """Update data source rule @@ -96,34 +135,41 @@ def update(self, params): data_source_rule_vo (object) """ - data_source_rule_id = params['data_source_rule_id'] - domain_id = params['domain_id'] - conditions_policy = params.get('conditions_policy') - conditions = params.get('conditions', []) + data_source_rule_id = params["data_source_rule_id"] + domain_id = params["domain_id"] + conditions_policy = params.get("conditions_policy") + conditions = params.get("conditions", []) - data_source_rule_vo = self.data_source_rule_mgr.get_data_source_rule(data_source_rule_id, domain_id) + data_source_rule_vo = self.data_source_rule_mgr.get_data_source_rule( + data_source_rule_id, domain_id + ) - if data_source_rule_vo.rule_type == 'MANAGED': + if 
data_source_rule_vo.rule_type == "MANAGED": raise ERROR_NOT_ALLOWED_TO_UPDATE_RULE() if conditions_policy: - if conditions_policy == 'ALWAYS': - params['conditions'] = [] + if conditions_policy == "ALWAYS": + params["conditions"] = [] else: if len(conditions) == 0: - raise ERROR_REQUIRED_PARAMETER(key='conditions') + raise ERROR_REQUIRED_PARAMETER(key="conditions") else: self._check_conditions(conditions) - if 'actions' in params: - self._check_actions(params['actions'], domain_id) + if "actions" in params: + self._check_actions(params["actions"], domain_id) - return self.data_source_rule_mgr.update_data_source_rule_by_vo(params, data_source_rule_vo) + return self.data_source_rule_mgr.update_data_source_rule_by_vo( + params, data_source_rule_vo + ) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_rule_id', 'order', 'domain_id']) + @transaction( + permission="cost-analysis:DataSourceRule.write", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_rule_id", "order", "domain_id"]) def change_order(self, params): - """ Change data source rule's order + """Change data source rule's order Args: params (dict): { @@ -136,47 +182,63 @@ def change_order(self, params): data_source_rule_vo (object) """ - data_source_rule_id = params['data_source_rule_id'] - order = params['order'] - domain_id = params['domain_id'] + data_source_rule_id = params["data_source_rule_id"] + order = params["order"] + domain_id = params["domain_id"] self._check_order(order) - target_data_source_rule_vo: DataSourceRule = self.data_source_rule_mgr.get_data_source_rule(data_source_rule_id, - domain_id) + target_data_source_rule_vo: DataSourceRule = ( + self.data_source_rule_mgr.get_data_source_rule( + data_source_rule_id, domain_id + ) + ) - if target_data_source_rule_vo.rule_type == 'MANAGED': + if target_data_source_rule_vo.rule_type == "MANAGED": raise ERROR_NOT_ALLOWED_TO_CHANGE_ORDER() if target_data_source_rule_vo.order 
== order: return target_data_source_rule_vo - highest_order = self._get_highest_order(target_data_source_rule_vo.data_source_id, - target_data_source_rule_vo.rule_type, - target_data_source_rule_vo.domain_id) + highest_order = self._get_highest_order( + target_data_source_rule_vo.data_source_id, + target_data_source_rule_vo.rule_type, + target_data_source_rule_vo.domain_id, + ) if order > highest_order: - raise ERROR_INVALID_PARAMETER(key='order', - reason=f'There is no data source rules greater than the {str(order)} order.') - - data_source_rule_vos = self._get_all_data_source_rules(target_data_source_rule_vo.data_source_id, - target_data_source_rule_vo.rule_type, - target_data_source_rule_vo.domain_id, - target_data_source_rule_vo.data_source_rule_id) + raise ERROR_INVALID_PARAMETER( + key="order", + reason=f"There is no data source rules greater than the {str(order)} order.", + ) + + data_source_rule_vos = self._get_all_data_source_rules( + target_data_source_rule_vo.data_source_id, + target_data_source_rule_vo.rule_type, + target_data_source_rule_vo.domain_id, + target_data_source_rule_vo.data_source_rule_id, + ) data_source_rule_vos.insert(order - 1, target_data_source_rule_vo) i = 0 for data_source_rule_vo in data_source_rule_vos: if target_data_source_rule_vo != data_source_rule_vo: - self.data_source_rule_mgr.update_data_source_rule_by_vo({'order': i + 1}, data_source_rule_vo) + self.data_source_rule_mgr.update_data_source_rule_by_vo( + {"order": i + 1}, data_source_rule_vo + ) i += 1 - return self.data_source_rule_mgr.update_data_source_rule_by_vo({'order': order}, target_data_source_rule_vo) + return self.data_source_rule_mgr.update_data_source_rule_by_vo( + {"order": order}, target_data_source_rule_vo + ) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_rule_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSourceRule.write", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER"], + ) + 
@check_required(["data_source_rule_id", "domain_id"]) def delete(self, params): """Delete data source rule @@ -190,53 +252,71 @@ def delete(self, params): None """ - data_source_rule_id = params['data_source_rule_id'] - domain_id = params['domain_id'] + data_source_rule_id = params["data_source_rule_id"] + domain_id = params["domain_id"] - data_source_rule_vo: DataSourceRule = self.data_source_rule_mgr.get_data_source_rule(data_source_rule_id, - domain_id) + data_source_rule_vo: DataSourceRule = ( + self.data_source_rule_mgr.get_data_source_rule( + data_source_rule_id, domain_id + ) + ) rule_type = data_source_rule_vo.rule_type - if rule_type == 'MANAGED': + if rule_type == "MANAGED": raise ERROR_NOT_ALLOWED_TO_DELETE_RULE() data_source_id = data_source_rule_vo.data_source_id self.data_source_rule_mgr.delete_data_source_rule_by_vo(data_source_rule_vo) - data_source_rule_vos = self._get_all_data_source_rules(data_source_id, rule_type, domain_id) + data_source_rule_vos = self._get_all_data_source_rules( + data_source_id, rule_type, domain_id + ) i = 0 for data_source_rule_vo in data_source_rule_vos: - self.data_source_rule_mgr.update_data_source_rule_by_vo({'order': i + 1}, data_source_rule_vo) + self.data_source_rule_mgr.update_data_source_rule_by_vo( + {"order": i + 1}, data_source_rule_vo + ) i += 1 - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_rule_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSourceRule.read", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["data_source_rule_id", "domain_id"]) def get(self, params): - """ Get data source rule + """Get data source rule Args: params (dict): { 'data_source_rule_id': 'str', + 'workspace_id' : 'list', 'domain_id': 'str', - 'only': 'list } Returns: data_source_rule_vo (object) """ - data_source_rule_id = params['data_source_rule_id'] - domain_id = 
params['domain_id'] - - return self.data_source_rule_mgr.get_data_source_rule(data_source_rule_id, domain_id, params.get('only')) - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['domain_id']) - @append_query_filter(['data_source_rule_id', 'name', 'data_source_id', 'domain_id']) - @append_keyword_filter(['data_source_rule_id', 'name']) + data_source_rule_id = params["data_source_rule_id"] + domain_id = params["domain_id"] + workspace_id = params.get("workspace_id") + + return self.data_source_rule_mgr.get_data_source_rule( + data_source_rule_id, domain_id, workspcae_id + ) + + @transaction( + permission="cost-analysis:DataSourceRule.read", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["domain_id"]) + @append_query_filter(["data_source_rule_id", "name", "data_source_id", "domain_id"]) + @append_keyword_filter(["data_source_rule_id", "name"]) def list(self, params): - """ List data source rule + """List data source rule Args: params (dict): { @@ -252,13 +332,17 @@ def list(self, params): total_count """ - query = params.get('query', {}) + query = params.get("query", {}) return self.data_source_rule_mgr.list_data_source_rules(query) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['query', 'domain_id']) - @append_query_filter(['domain_id']) - @append_keyword_filter(['data_source_rule_id', 'name']) + @transaction( + permission="cost-analysis:DataSourceRule.read", + role_types=["DOMAIN_OWNER", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["query", "domain_id"]) + @append_query_filter(["domain_id"]) + @append_keyword_filter(["data_source_rule_id", "name"]) def stat(self, params): """ Args: @@ -272,84 +356,91 @@ def stat(self, params): """ - query = params.get('query', {}) + query = params.get("query", {}) return 
self.data_source_rule_mgr.stat_data_source_rules(query) @staticmethod def _check_conditions(conditions): for condition in conditions: - key = condition.get('key') - value = condition.get('value') - operator = condition.get('operator') + key = condition.get("key") + value = condition.get("value") + operator = condition.get("operator") if not (key and value and operator): - raise ERROR_INVALID_PARAMETER(key='conditions', reason='Condition should have key, value and operator.') + raise ERROR_INVALID_PARAMETER( + key="conditions", + reason="Condition should have key, value and operator.", + ) if key not in _SUPPORTED_CONDITION_KEYS: - if not (fnmatch.fnmatch(key, 'additional_info.*') or fnmatch.fnmatch(key, 'tags.*')): - raise ERROR_INVALID_PARAMETER(key='conditions.key', - reason=f'Unsupported key. ' - f'({" | ".join(_SUPPORTED_CONDITION_KEYS)})') + if not ( + fnmatch.fnmatch(key, "additional_info.*") + or fnmatch.fnmatch(key, "tags.*") + ): + raise ERROR_INVALID_PARAMETER( + key="conditions.key", + reason=f"Unsupported key. " + f'({" | ".join(_SUPPORTED_CONDITION_KEYS)})', + ) if operator not in _SUPPORTED_CONDITION_OPERATORS: - raise ERROR_INVALID_PARAMETER(key='conditions.operator', - reason=f'Unsupported operator. ' - f'({" | ".join(_SUPPORTED_CONDITION_OPERATORS)})') + raise ERROR_INVALID_PARAMETER( + key="conditions.operator", + reason=f"Unsupported operator. 
" + f'({" | ".join(_SUPPORTED_CONDITION_OPERATORS)})', + ) def _check_actions(self, actions, domain_id): - if project_id := actions.get('change_project'): - identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager') + if project_id := actions.get("change_project"): + identity_mgr: IdentityManager = self.locator.get_manager("IdentityManager") identity_mgr.get_project(project_id, domain_id) - if match_project := actions.get('match_project'): - if 'source' not in match_project: - raise ERROR_REQUIRED_PARAMETER(key='actions.match_project.source') + if match_project := actions.get("match_project"): + if "source" not in match_project: + raise ERROR_REQUIRED_PARAMETER(key="actions.match_project.source") - if match_service_account := actions.get('match_service_account'): - if 'source' not in match_service_account: - raise ERROR_REQUIRED_PARAMETER(key='actions.match_service_account.source') + if match_service_account := actions.get("match_service_account"): + if "source" not in match_service_account: + raise ERROR_REQUIRED_PARAMETER( + key="actions.match_service_account.source" + ) def _get_highest_order(self, data_source_id, rule_type, domain_id): - data_source_rule_vos = self.data_source_rule_mgr.filter_data_source_rules(data_source_id=data_source_id, - rule_type=rule_type, - domain_id=domain_id) + data_source_rule_vos = self.data_source_rule_mgr.filter_data_source_rules( + data_source_id=data_source_id, rule_type=rule_type, domain_id=domain_id + ) return data_source_rule_vos.count() @staticmethod def _check_order(order): if order <= 0: - raise ERROR_INVALID_PARAMETER(key='order', reason='The order must be greater than 0.') + raise ERROR_INVALID_PARAMETER( + key="order", reason="The order must be greater than 0." 
+ ) - def _get_all_data_source_rules(self, data_source_id, rule_type, domain_id, exclude_data_source_rule_id=None): + def _get_all_data_source_rules( + self, data_source_id, rule_type, domain_id, exclude_data_source_rule_id=None + ): query = { - 'filter': [ - { - 'k': 'domain_id', - 'v': domain_id, - 'o': 'eq' - }, - { - 'k': 'data_source_id', - 'v': data_source_id, - 'o': 'eq' - }, - { - 'k': 'rule_type', - 'v': rule_type, - 'o': 'eq' - }, + "filter": [ + {"k": "domain_id", "v": domain_id, "o": "eq"}, + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "rule_type", "v": rule_type, "o": "eq"}, ], - 'sort': { - 'key': 'order' - } + "sort": {"key": "order"}, } if exclude_data_source_rule_id is not None: - query['filter'].append({ - 'k': 'data_source_rule_id', - 'v': exclude_data_source_rule_id, - 'o': 'not' - }) - - data_source_rule_vos, total_count = self.data_source_rule_mgr.list_data_source_rules(query) + query["filter"].append( + { + "k": "data_source_rule_id", + "v": exclude_data_source_rule_id, + "o": "not", + } + ) + + ( + data_source_rule_vos, + total_count, + ) = self.data_source_rule_mgr.list_data_source_rules(query) return list(data_source_rule_vos) diff --git a/src/spaceone/cost_analysis/service/data_source_service.py b/src/spaceone/cost_analysis/service/data_source_service.py index afe3535e..fb00041f 100644 --- a/src/spaceone/cost_analysis/service/data_source_service.py +++ b/src/spaceone/cost_analysis/service/data_source_service.py @@ -4,13 +4,15 @@ from spaceone.cost_analysis.service.job_service import JobService from spaceone.cost_analysis.manager.repository_manager import RepositoryManager from spaceone.cost_analysis.manager.secret_manager import SecretManager -from spaceone.cost_analysis.manager.data_source_plugin_manager import DataSourcePluginManager +from spaceone.cost_analysis.manager.data_source_plugin_manager import ( + DataSourcePluginManager, +) +from spaceone.cost_analysis.manager.budget_usage_manager import 
BudgetUsageManager +from spaceone.cost_analysis.manager.cost_manager import CostManager +from spaceone.cost_analysis.model.data_source_model import DataSource from spaceone.cost_analysis.manager.data_source_manager import DataSourceManager from spaceone.cost_analysis.manager.job_manager import JobManager -from spaceone.cost_analysis.manager.cost_manager import CostManager -from spaceone.cost_analysis.manager.budget_usage_manager import BudgetUsageManager from spaceone.cost_analysis.manager.identity_manager import IdentityManager -from spaceone.cost_analysis.model.data_source_model import DataSource _LOGGER = logging.getLogger(__name__) @@ -20,17 +22,20 @@ @mutation_handler @event_handler class DataSourceService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.data_source_mgr: DataSourceManager = self.locator.get_manager('DataSourceManager') - self.ds_plugin_mgr: DataSourcePluginManager = self.locator.get_manager('DataSourcePluginManager') - self.cost_mgr: CostManager = self.locator.get_manager('CostManager') - self.budget_usage_mgr: BudgetUsageManager = self.locator.get_manager('BudgetUsageManager') - self.job_mgr: JobManager = self.locator.get_manager('JobManager') - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['name', 'data_source_type', 'domain_id']) + self.data_source_mgr = DataSourceManager() + self.ds_plugin_mgr = DataSourcePluginManager() + self.cost_mg = CostManager() + self.budget_usage_mgr = BudgetUsageManager() + self.job_mgr = JobManager() + self.identity_mgr = IdentityManager() + + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["name", "data_source_type", "domain_id"]) def register(self, params): """Register data source @@ -44,103 +49,129 @@ def register(self, params): 'template': 'dict', 'plugin_info': 'dict', 'tags': 'dict', - 'domain_id': 'str' + 'resource_group': 'str # required + 
'workspace_id': 'str' + 'domain_id': 'str' # injected from auth } Returns: data_source_vo (object) """ - domain_id = params['domain_id'] - data_source_type = params['data_source_type'] + domain_id = params["domain_id"] + data_source_type = params["data_source_type"] + + # Check permission by resource group + if params["resource_group"] == "WORKSPACE": + self.identity_mgr.get_workspace(params["workspace_id"]) + else: + params["workspace_id"] = "*" - if data_source_type == 'EXTERNAL': - params['template'] = None + if data_source_type == "EXTERNAL": + params["template"] = None - plugin_info = params.get('plugin_info', {}) - secret_type = params.get('secret_type', 'MANUAL') + plugin_info = params.get("plugin_info", {}) + secret_type = params.get("secret_type", "MANUAL") - if secret_type == 'USE_SERVICE_ACCOUNT_SECRET' and 'provider' not in params: - raise ERROR_REQUIRED_PARAMETER(key='provider') + if secret_type == "USE_SERVICE_ACCOUNT_SECRET" and "provider" not in params: + raise ERROR_REQUIRED_PARAMETER(key="provider") self._validate_plugin_info(plugin_info, secret_type) - self._check_plugin(plugin_info['plugin_id'], domain_id) + self._check_plugin(plugin_info["plugin_id"], domain_id) - if 'secret_filter' in params: - self.validate_secret_filter(params['secret_filter'], params['domain_id']) + if "secret_filter" in params: + self.validate_secret_filter( + params["secret_filter"], params["domain_id"] + ) # Update metadata - endpoint, updated_version = self.ds_plugin_mgr.get_data_source_plugin_endpoint(plugin_info, domain_id) + ( + endpoint, + updated_version, + ) = self.ds_plugin_mgr.get_data_source_plugin_endpoint( + plugin_info, domain_id + ) if updated_version: - params['plugin_info']['version'] = updated_version + params["plugin_info"]["version"] = updated_version - options = params['plugin_info'].get('options', {}) + options = params["plugin_info"].get("options", {}) plugin_metadata = self._init_plugin(endpoint, options, domain_id) - 
params['plugin_info']['metadata'] = plugin_metadata + params["plugin_info"]["metadata"] = plugin_metadata - secret_data = plugin_info.get('secret_data') - if secret_type == 'MANUAL' and secret_data: + secret_data = plugin_info.get("secret_data") + if secret_type == "MANUAL" and secret_data: self._verify_plugin(endpoint, plugin_info, domain_id) - secret_mgr: SecretManager = self.locator.get_manager('SecretManager') - secret_id = secret_mgr.create_secret(domain_id, secret_data, plugin_info.get('schema')) + secret_mgr: SecretManager = self.locator.get_manager("SecretManager") + secret_id = secret_mgr.create_secret( + domain_id, secret_data, plugin_info.get("schema") + ) - params['plugin_info']['secret_id'] = secret_id - del params['plugin_info']['secret_data'] + params["plugin_info"]["secret_id"] = secret_id + del params["plugin_info"]["secret_data"] else: - params['plugin_info'] = None - params['secret_type'] = None - params['secret_filter'] = None + params["plugin_info"] = None + params["secret_type"] = None + params["secret_filter"] = None - if template := params.get('template'): + if template := params.get("template"): # Check Template pass else: - raise ERROR_REQUIRED_PARAMETER(key='template') + raise ERROR_REQUIRED_PARAMETER(key="template") data_source_vo: DataSource = self.data_source_mgr.register_data_source(params) - if data_source_type == 'EXTERNAL': + if data_source_type == "EXTERNAL": data_source_id = data_source_vo.data_source_id metadata = data_source_vo.plugin_info.metadata - self.ds_plugin_mgr.create_data_source_rules_by_metadata(metadata, data_source_id, domain_id) + self.ds_plugin_mgr.create_data_source_rules_by_metadata( + metadata, data_source_id, domain_id + ) return data_source_vo - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", 
"domain_id"]) def update(self, params): """Update data source Args: params (dict): { - 'data_source_id': 'str', + 'data_source_id': 'str', # required 'name': 'str', 'secret_filter': 'dict', 'template': 'dict', 'tags': 'dict' - 'domain_id': 'str' + 'domain_id': 'str' # injected from auth } Returns: data_source_vo (object) """ - data_source_id = params['data_source_id'] - domain_id = params['domain_id'] - data_source_vo: DataSource = self.data_source_mgr.get_data_source(data_source_id, domain_id) - - if 'secret_filter' in params: - if data_source_vo.secret_type == 'USE_SERVICE_ACCOUNT_SECRET': - self.validate_secret_filter(params['secret_filter'], params['domain_id']) + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + data_source_id, domain_id + ) + + if "secret_filter" in params: + if data_source_vo.secret_type == "USE_SERVICE_ACCOUNT_SECRET": + self.validate_secret_filter( + params["secret_filter"], params["domain_id"] + ) else: raise ERROR_NOT_ALLOW_SECRET_FILTER(data_source_id=data_source_id) - if 'template' in params: - if data_source_vo.data_source_type == 'LOCAL': + if "template" in params: + if data_source_vo.data_source_type == "LOCAL": # Check Template pass else: @@ -148,69 +179,88 @@ def update(self, params): return self.data_source_mgr.update_data_source_by_vo(params, data_source_vo) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", "domain_id"]) def enable(self, params): - """ Enable data source + """Enable data source Args: params (dict): { - 'data_source_id': 'str', - 'domain_id': 'str' + 'data_source_id': 'str', # required + 'domain_id': 'str' # injected from auth } Returns: data_source_vo (object) """ - data_source_id = 
params['data_source_id'] - domain_id = params['domain_id'] - data_source_vo: DataSource = self.data_source_mgr.get_data_source(data_source_id, domain_id) - - return self.data_source_mgr.update_data_source_by_vo({'state': 'ENABLED'}, data_source_vo) - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + data_source_id, domain_id + ) + + return self.data_source_mgr.update_data_source_by_vo( + {"state": "ENABLED"}, data_source_vo + ) + + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", "domain_id"]) def disable(self, params): - """ Disable data source + """Disable data source Args: params (dict): { - 'data_source_id': 'str', - 'domain_id': 'str' + 'data_source_id': 'str', # required + 'domain_id': 'str' # injected from auth } Returns: data_source_vo (object) """ - data_source_id = params['data_source_id'] - domain_id = params['domain_id'] - data_source_vo: DataSource = self.data_source_mgr.get_data_source(data_source_id, domain_id) - - return self.data_source_mgr.update_data_source_by_vo({'state': 'DISABLED'}, data_source_vo) - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + data_source_id, domain_id + ) + + return self.data_source_mgr.update_data_source_by_vo( + {"state": "DISABLED"}, data_source_vo + ) + + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", "domain_id"]) def deregister(self, params): """Deregister data source Args: params (dict): { - 
'data_source_id': 'str', + 'data_source_id': 'str', # required 'cascade_delete_cost: 'bool', - 'domain_id': 'str' + 'domain_id': 'str' # injected from auth } Returns: None """ - data_source_id = params['data_source_id'] - cascade_delete_cost = params.get('cascade_delete_cost', True) - domain_id = params['domain_id'] + data_source_id = params["data_source_id"] + cascade_delete_cost = params.get("cascade_delete_cost", True) + domain_id = params["domain_id"] - data_source_vo: DataSource = self.data_source_mgr.get_data_source(data_source_id, domain_id) + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + data_source_id, domain_id + ) if cascade_delete_cost: self.cost_mgr.delete_cost_with_datasource(domain_id, data_source_id) @@ -222,160 +272,191 @@ def deregister(self, params): secret_id = data_source_vo.plugin_info.secret_id if secret_id: - secret_mgr: SecretManager = self.locator.get_manager('SecretManager') + secret_mgr: SecretManager = self.locator.get_manager("SecretManager") secret_mgr.delete_secret(secret_id, domain_id) self.data_source_mgr.deregister_data_source_by_vo(data_source_vo) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", "domain_id"]) def sync(self, params): """Sync data with data source Args: params (dict): { - 'data_source_id': 'str', + 'data_source_id': 'str', # required 'start': 'datetime', 'no_preload_cache': 'bool', - 'domain_id': 'str' + 'domain_id': 'str' # injected from auth } Returns: None """ - job_service: JobService = self.locator.get_service('JobService') + job_service: JobService = self.locator.get_service("JobService") - data_source_id = params['data_source_id'] - domain_id = params['domain_id'] + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] job_options = { - 
'no_preload_cache': params.get('no_preload_cache', False), - 'start': params.get('start') + "no_preload_cache": params.get("no_preload_cache", False), + "start": params.get("start"), } - data_source_vo: DataSource = self.data_source_mgr.get_data_source(data_source_id, domain_id) + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + data_source_id, domain_id + ) - if data_source_vo.state == 'DISABLED': + if data_source_vo.state == "DISABLED": raise ERROR_DATA_SOURCE_STATE(data_source_id=data_source_id) - if data_source_vo.data_source_type == 'LOCAL': + if data_source_vo.data_source_type == "LOCAL": raise ERROR_NOT_ALLOW_SYNC_COMMAND(data_source_id=data_source_id) return job_service.create_cost_job(data_source_vo, job_options) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", "domain_id"]) def verify_plugin(self, params): - """ Verify data source plugin + """Verify data source plugin Args: params (dict): { - 'data_source_id': 'str', - 'domain_id': 'str' + 'data_source_id': 'str', # required + 'domain_id': 'str' # injected from auth } Returns: data_source_vo (object) """ - data_source_id = params['data_source_id'] - domain_id = params['domain_id'] - data_source_vo: DataSource = self.data_source_mgr.get_data_source(data_source_id, domain_id) + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + data_source_id, domain_id + ) - if data_source_vo.data_source_type == 'LOCAL': + if data_source_vo.data_source_type == "LOCAL": raise ERROR_NOT_ALLOW_PLUGIN_SETTINGS(data_source_id=data_source_id) - endpoint = self.ds_plugin_mgr.get_data_source_plugin_endpoint_by_vo(data_source_vo) + endpoint = self.ds_plugin_mgr.get_data_source_plugin_endpoint_by_vo( + 
data_source_vo + ) plugin_info = data_source_vo.plugin_info.to_dict() self._verify_plugin(endpoint, plugin_info, domain_id) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSource.write", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER"], + ) + @check_required(["data_source_id", "domain_id"]) def update_plugin(self, params): """Update data source plugin Args: params (dict): { - 'data_source_id': 'str', + 'data_source_id': 'str', # required 'version': 'str', 'options': 'dict', 'upgrade_mode': 'str', - 'domain_id': 'str' + 'domain_id': 'str' # injected from auth } Returns: data_source_vo (object) """ - data_source_id = params['data_source_id'] - domain_id = params['domain_id'] - version = params.get('version') - options = params.get('options') - upgrade_mode = params.get('upgrade_mode') + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] + version = params.get("version") + options = params.get("options") + upgrade_mode = params.get("upgrade_mode") data_source_vo = self.data_source_mgr.get_data_source(data_source_id, domain_id) - if data_source_vo.data_source_type == 'LOCAL': + if data_source_vo.data_source_type == "LOCAL": raise ERROR_NOT_ALLOW_PLUGIN_SETTINGS(data_source_id=data_source_id) plugin_info = data_source_vo.plugin_info.to_dict() if version: - plugin_info['version'] = version + plugin_info["version"] = version if isinstance(options, dict): - plugin_info['options'] = options + plugin_info["options"] = options if upgrade_mode: - plugin_info['upgrade_mode'] = upgrade_mode + plugin_info["upgrade_mode"] = upgrade_mode - endpoint, updated_version = self.ds_plugin_mgr.get_data_source_plugin_endpoint(plugin_info, domain_id) + endpoint, updated_version = self.ds_plugin_mgr.get_data_source_plugin_endpoint( + plugin_info, domain_id + ) if updated_version: - plugin_info['version'] = updated_version + plugin_info["version"] = 
updated_version - options = plugin_info.get('options', {}) + options = plugin_info.get("options", {}) plugin_metadata = self._init_plugin(endpoint, options, domain_id) - plugin_info['metadata'] = plugin_metadata + plugin_info["metadata"] = plugin_metadata - params = { - 'plugin_info': plugin_info - } + params = {"plugin_info": plugin_info} - data_source_vo = self.data_source_mgr.update_data_source_by_vo(params, data_source_vo) + data_source_vo = self.data_source_mgr.update_data_source_by_vo( + params, data_source_vo + ) self.ds_plugin_mgr.delete_data_source_rules(data_source_id, domain_id) - self.ds_plugin_mgr.create_data_source_rules_by_metadata(plugin_metadata, data_source_id, domain_id) + self.ds_plugin_mgr.create_data_source_rules_by_metadata( + plugin_metadata, data_source_id, domain_id + ) return data_source_vo - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['data_source_id', 'domain_id']) + @transaction( + permission="cost-analysis:DataSource.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["data_source_id", "domain_id"]) def get(self, params): - """ Get data source + """Get data source Args: params (dict): { - 'data_source_id': 'str', - 'domain_id': 'str', - 'only': 'list + 'data_source_id': 'str', # required + 'workspace_id': 'list' + 'domain_id': 'str', # injected from auth } Returns: data_source_vo (object) """ - data_source_id = params['data_source_id'] - domain_id = params['domain_id'] - - return self.data_source_mgr.get_data_source(data_source_id, domain_id, params.get('only')) - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['domain_id']) - @append_query_filter(['data_source_id', 'name', 'state', 'data_source_type', 'provider', 'domain_id']) - @change_tag_filter('tags') - @append_keyword_filter(['data_source_id', 'name']) + data_source_id = params["data_source_id"] + domain_id = params["domain_id"] + workspcae_id = 
params.get("workspace_id") + + return self.data_source_mgr.get_data_source( + data_source_id, domain_id, workspcae_id + ) + + @transaction( + permission="cost-analysis:DataSource.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["domain_id"]) + @append_query_filter( + ["data_source_id", "name", "state", "data_source_type", "provider", "domain_id"] + ) + @change_tag_filter("tags") + @append_keyword_filter(["data_source_id", "name"]) def list(self, params): - """ List data sources + """List data sources Args: params (dict): { @@ -384,6 +465,7 @@ def list(self, params): 'state': 'str', 'data_source_type': 'str', 'provider': 'str', + 'workspace_id': 'str, 'domain_id': 'str', 'query': 'dict (spaceone.api.core.v1.Query)' } @@ -393,14 +475,17 @@ def list(self, params): total_count """ - query = params.get('query', {}) + query = params.get("query", {}) return self.data_source_mgr.list_data_sources(query) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['query', 'domain_id']) - @append_query_filter(['domain_id']) - @change_tag_filter('tags') - @append_keyword_filter(['data_source_id', 'name']) + @transaction( + permission="cost-analysis:DataSource.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["query", "domain_id"]) + @append_query_filter(["domain_id"]) + @change_tag_filter("tags") + @append_keyword_filter(["data_source_id", "name"]) def stat(self, params): """ Args: @@ -414,33 +499,52 @@ def stat(self, params): """ - query = params.get('query', {}) + query = params.get("query", {}) return self.data_source_mgr.stat_data_sources(query) def validate_secret_filter(self, secret_filter, domain_id): - if 'secrets' in secret_filter: - _query = {'filter': [{'k': 'secret_id', 'v': secret_filter['secrets'], 'o': 'in'}]} + if "secrets" in secret_filter: + _query = { + "filter": [{"k": "secret_id", 
"v": secret_filter["secrets"], "o": "in"}] + } secret_mgr: SecretManager = self.locator.get_manager(SecretManager) response = secret_mgr.list_secrets(_query, domain_id) - if response.get('total_count', 0) != len(secret_filter['secrets']): - raise ERROR_INVALID_PARAMETER(key='secret_filter.secrets', reason='Secrets not found') - - if 'service_accounts' in secret_filter: - _query = {'filter': [{'k': 'service_account_id', 'v': secret_filter['service_accounts'], 'o': 'in'}]} + if response.get("total_count", 0) != len(secret_filter["secrets"]): + raise ERROR_INVALID_PARAMETER( + key="secret_filter.secrets", reason="Secrets not found" + ) + + if "service_accounts" in secret_filter: + _query = { + "filter": [ + { + "k": "service_account_id", + "v": secret_filter["service_accounts"], + "o": "in", + } + ] + } identity_mgr: IdentityManager = self.locator.get_manager(IdentityManager) response = identity_mgr.list_service_accounts(_query, domain_id) - if response.get('total_count', 0) != len(secret_filter['service_accounts']): - raise ERROR_INVALID_PARAMETER(key='secret_filter.service_accounts', reason='Service accounts not found') - - if 'schemas' in secret_filter: - _query = {'filter': [{'k': 'name', 'v': secret_filter['schemas'], 'o': 'in'}]} + if response.get("total_count", 0) != len(secret_filter["service_accounts"]): + raise ERROR_INVALID_PARAMETER( + key="secret_filter.service_accounts", + reason="Service accounts not found", + ) + + if "schemas" in secret_filter: + _query = { + "filter": [{"k": "name", "v": secret_filter["schemas"], "o": "in"}] + } repo_mgr: RepositoryManager = self.locator.get_manager(RepositoryManager) response = repo_mgr.list_schemas(_query, domain_id) - if response.get('total_count', 0) != len(secret_filter['schemas']): - raise ERROR_INVALID_PARAMETER(key='secret_filter.schema', reason='Schema not found') + if response.get("total_count", 0) != len(secret_filter["schemas"]): + raise ERROR_INVALID_PARAMETER( + key="secret_filter.schema", 
reason="Schema not found" + ) def _check_plugin(self, plugin_id, domain_id): - repo_mgr: RepositoryManager = self.locator.get_manager('RepositoryManager') + repo_mgr: RepositoryManager = self.locator.get_manager("RepositoryManager") repo_mgr.get_plugin(plugin_id, domain_id) def _init_plugin(self, endpoint, options, domain_id): @@ -448,10 +552,10 @@ def _init_plugin(self, endpoint, options, domain_id): return self.ds_plugin_mgr.init_plugin(options, domain_id) def _verify_plugin(self, endpoint, plugin_info, domain_id): - options = plugin_info.get('options', {}) - secret_id = plugin_info.get('secret_id') - secret_data = plugin_info.get('secret_data') - schema = plugin_info.get('schema') + options = plugin_info.get("options", {}) + secret_id = plugin_info.get("secret_id") + secret_data = plugin_info.get("secret_data") + schema = plugin_info.get("schema") if not secret_data: secret_data = self._get_secret_data(secret_id, domain_id) @@ -460,7 +564,7 @@ def _verify_plugin(self, endpoint, plugin_info, domain_id): self.ds_plugin_mgr.verify_plugin(options, secret_data, schema, domain_id) def _get_secret_data(self, secret_id, domain_id): - secret_mgr: SecretManager = self.locator.get_manager('SecretManager') + secret_mgr: SecretManager = self.locator.get_manager("SecretManager") if secret_id: secret_data = secret_mgr.get_secret_data(secret_id, domain_id) else: @@ -470,11 +574,14 @@ def _get_secret_data(self, secret_id, domain_id): @staticmethod def _validate_plugin_info(plugin_info, secret_type): - if 'plugin_id' not in plugin_info: - raise ERROR_REQUIRED_PARAMETER(key='plugin_info.plugin_id') + if "plugin_id" not in plugin_info: + raise ERROR_REQUIRED_PARAMETER(key="plugin_info.plugin_id") - if plugin_info.get('upgrade_mode', 'AUTO') == 'MANUAL' and 'version' not in plugin_info: - raise ERROR_REQUIRED_PARAMETER(key='plugin_info.version') + if ( + plugin_info.get("upgrade_mode", "AUTO") == "MANUAL" + and "version" not in plugin_info + ): + raise 
ERROR_REQUIRED_PARAMETER(key="plugin_info.version") - if secret_type == 'MANUAL' and plugin_info.get('secret_data') is None: - raise ERROR_REQUIRED_PARAMETER(key='plugin_info.secret_data') + if secret_type == "MANUAL" and plugin_info.get("secret_data") is None: + raise ERROR_REQUIRED_PARAMETER(key="plugin_info.secret_data") diff --git a/src/spaceone/cost_analysis/service/job_service.py b/src/spaceone/cost_analysis/service/job_service.py index 4b10bdec..05412320 100644 --- a/src/spaceone/cost_analysis/service/job_service.py +++ b/src/spaceone/cost_analysis/service/job_service.py @@ -13,33 +13,39 @@ from spaceone.cost_analysis.manager.cost_manager import CostManager from spaceone.cost_analysis.manager.job_manager import JobManager from spaceone.cost_analysis.manager.job_task_manager import JobTaskManager -from spaceone.cost_analysis.manager.data_source_plugin_manager import DataSourcePluginManager +from spaceone.cost_analysis.manager.data_source_plugin_manager import ( + DataSourcePluginManager, +) from spaceone.cost_analysis.manager.data_source_manager import DataSourceManager from spaceone.cost_analysis.manager.secret_manager import SecretManager from spaceone.cost_analysis.manager.budget_usage_manager import BudgetUsageManager - _LOGGER = logging.getLogger(__name__) -@authentication_handler(exclude=['create_jobs_by_data_source', 'get_cost_data']) -@authorization_handler(exclude=['create_jobs_by_data_source', 'get_cost_data']) -@mutation_handler(exclude=['create_jobs_by_data_source', 'get_cost_data']) +@authentication_handler +@authorization_handler +@mutation_handler @event_handler class JobService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.cost_mgr: CostManager = self.locator.get_manager('CostManager') - self.job_mgr: JobManager = self.locator.get_manager('JobManager') - self.job_task_mgr: JobTaskManager = self.locator.get_manager('JobTaskManager') - self.data_source_mgr: DataSourceManager = 
self.locator.get_manager('DataSourceManager') - self.ds_plugin_mgr: DataSourcePluginManager = self.locator.get_manager('DataSourcePluginManager') - self.budget_usage_mgr: BudgetUsageManager = self.locator.get_manager('BudgetUsageManager') - - @transaction(append_meta={'authorization.scope': 'SYSTEM'}) + self.cost_mgr: CostManager = self.locator.get_manager("CostManager") + self.job_mgr: JobManager = self.locator.get_manager("JobManager") + self.job_task_mgr: JobTaskManager = self.locator.get_manager("JobTaskManager") + self.data_source_mgr: DataSourceManager = self.locator.get_manager( + "DataSourceManager" + ) + self.ds_plugin_mgr: DataSourcePluginManager = self.locator.get_manager( + "DataSourcePluginManager" + ) + self.budget_usage_mgr: BudgetUsageManager = self.locator.get_manager( + "BudgetUsageManager" + ) + + @transaction(exclude=["authentication", "authorization", "mutation"]) def create_jobs_by_data_source(self, params): - """ Create jobs by domain + """Create jobs by domain Args: params (dict): {} @@ -52,16 +58,22 @@ def create_jobs_by_data_source(self, params): try: self.create_cost_job(data_source_vo, {}) except Exception as e: - _LOGGER.error(f'[create_jobs_by_data_source] sync error: {e}', exc_info=True) - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['job_id', 'domain_id']) + _LOGGER.error( + f"[create_jobs_by_data_source] sync error: {e}", exc_info=True + ) + + @transaction( + permission="cost-analysis:Job.write", + role_types=["WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["job_id", "workspace_id", "domain_id"]) def cancel(self, params): - """ Get job + """Get job Args: params (dict): { 'job_id': 'str', + 'workspace_id': 'str', 'domain_id': 'str' } @@ -69,20 +81,25 @@ def cancel(self, params): job_vo (object) """ - job_id = params['job_id'] - domain_id = params['domain_id'] + job_id = params["job_id"] + workspace_id = params["workspace_id"] + domain_id = params["domain_id"] job_vo = 
self.job_mgr.get_job(job_id, domain_id) - if job_vo.status != 'IN_PROGRESS': + if job_vo.status != "IN_PROGRESS": raise ERROR_JOB_STATE(job_state=job_vo.status) return self.job_mgr.change_canceled_status(job_vo) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['job_id', 'domain_id']) + @transaction( + permission="cost-analysis:Job.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["job_id", "domain_id"]) def get(self, params): - """ Get job + """Get job Args: params (dict): { @@ -95,23 +112,29 @@ def get(self, params): job_vo (object) """ - job_id = params['job_id'] - domain_id = params['domain_id'] + job_id = params["job_id"] + workspace_id = params["workspace_id"] + domain_id = params["domain_id"] - return self.job_mgr.get_job(job_id, domain_id, params.get('only')) + return self.job_mgr.get_job(job_id, domain_id, workspace_id) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['domain_id']) - @append_query_filter(['job_id', 'status', 'data_source_id', 'domain_id']) - @append_keyword_filter(['job_id']) + @transaction( + permission="cost-analysis:Job.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["domain_id"]) + @append_query_filter(["job_id", "status", "data_source_id", "domain_id"]) + @append_keyword_filter(["job_id"]) def list(self, params): - """ List jobs + """List jobs Args: params (dict): { 'job_id': 'str', 'status': 'str', 'data_source_id': 'str', + 'workspace_id': 'str', 'domain_id': 'str', 'query': 'dict (spaceone.api.core.v1.Query)' } @@ -121,18 +144,23 @@ def list(self, params): total_count """ - query = params.get('query', {}) + query = params.get("query", {}) return self.job_mgr.list_jobs(query) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - 
@check_required(['query', 'domain_id']) - @append_query_filter(['domain_id']) - @append_keyword_filter(['job_id']) + @transaction( + permission="cost-analysis:Job.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["query", "domain_id"]) + @append_query_filter(["domain_id"]) + @append_keyword_filter(["job_id"]) def stat(self, params): """ Args: params (dict): { 'domain_id': 'str', + 'workspace_id': 'str, 'query': 'dict (spaceone.api.core.v1.StatisticsQuery)' } @@ -141,11 +169,11 @@ def stat(self, params): """ - query = params.get('query', {}) + query = params.get("query", {}) return self.job_mgr.stat_jobs(query) - @transaction - @check_required(['task_options', 'job_task_id', 'domain_id']) + @transaction(exclude=["authentication", "authorization", "mutation"]) + @check_required(["task_options", "job_task_id", "domain_id"]) def get_cost_data(self, params): """Execute task to get cost data @@ -161,77 +189,102 @@ def get_cost_data(self, params): None """ - task_options = params['task_options'] - job_task_id = params['job_task_id'] - secret_id = params['secret_id'] - domain_id = params['domain_id'] + task_options = params["task_options"] + job_task_id = params["job_task_id"] + secret_id = params["secret_id"] + domain_id = params["domain_id"] cost_data_options = {} job_task_vo: JobTask = self.job_task_mgr.get_job_task(job_task_id, domain_id) - data_source_vo: DataSource = self.data_source_mgr.get_data_source(job_task_vo.data_source_id, domain_id) + data_source_vo: DataSource = self.data_source_mgr.get_data_source( + job_task_vo.data_source_id, domain_id + ) plugin_info = data_source_vo.plugin_info.to_dict() secret_type = data_source_vo.secret_type data_source_id = data_source_vo.data_source_id job_id = job_task_vo.job_id - if self._is_job_canceled(job_id, domain_id): + if self._is_job_failed(job_id, domain_id): self.job_task_mgr.change_canceled_status(job_task_vo) 
else: job_task_vo = self.job_task_mgr.change_in_progress_status(job_task_vo) try: - options = plugin_info.get('options', {}) - schema = plugin_info.get('schema') + options = plugin_info.get("options", {}) + schema = plugin_info.get("schema") tag_keys = data_source_vo.cost_tag_keys additional_info_keys = data_source_vo.cost_additional_info_keys data_keys = data_source_vo.cost_data_keys secret_type = data_source_vo.secret_type - options.update({'secret_type': secret_type}) + options.update({"secret_type": secret_type}) secret_data = self._get_secret_data(secret_id, domain_id) - if secret_type == 'USE_SERVICE_ACCOUNT_SECRET': - service_account_id, project_id = self._get_service_account_id_and_project_id(params.get('secret_id'), - domain_id) - cost_data_options.update({ - 'service_account_id': service_account_id, - 'project_id': project_id - }) - - endpoint, updated_version = self.ds_plugin_mgr.get_data_source_plugin_endpoint(plugin_info, domain_id) + if secret_type == "USE_SERVICE_ACCOUNT_SECRET": + ( + service_account_id, + project_id, + ) = self._get_service_account_id_and_project_id( + params.get("secret_id"), domain_id + ) + cost_data_options.update( + { + "service_account_id": service_account_id, + "project_id": project_id, + } + ) + + ( + endpoint, + updated_version, + ) = self.ds_plugin_mgr.get_data_source_plugin_endpoint( + plugin_info, domain_id + ) self.ds_plugin_mgr.initialize(endpoint) start_dt = datetime.utcnow() count = 0 is_canceled = False - _LOGGER.debug(f'[get_cost_data] start job ({job_task_id}): {start_dt}') - for costs_data in self.ds_plugin_mgr.get_cost_data(options, secret_data, schema, task_options, domain_id): - results = costs_data.get('results', []) + _LOGGER.debug(f"[get_cost_data] start job ({job_task_id}): {start_dt}") + for costs_data in self.ds_plugin_mgr.get_cost_data( + options, secret_data, schema, task_options, domain_id + ): + results = costs_data.get("results", []) for cost_data in results: count += 1 
self._check_cost_data(cost_data) - self._create_cost_data(cost_data, job_task_vo, cost_data_options) + self._create_cost_data( + cost_data, job_task_vo, cost_data_options + ) tag_keys = self._append_tag_keys(tag_keys, cost_data) - additional_info_keys = self._append_additional_info_keys(additional_info_keys, cost_data) + additional_info_keys = self._append_additional_info_keys( + additional_info_keys, cost_data + ) data_keys = self._append_data_keys(data_keys, cost_data) - if self._is_job_canceled(job_id, domain_id): + if self._is_job_failed(job_id, domain_id): self.job_task_mgr.change_canceled_status(job_task_vo) is_canceled = True break else: - job_task_vo = self.job_task_mgr.update_sync_status(job_task_vo, len(results)) + job_task_vo = self.job_task_mgr.update_sync_status( + job_task_vo, len(results) + ) if not is_canceled: end_dt = datetime.utcnow() - _LOGGER.debug(f'[get_cost_data] end job ({job_task_id}): {end_dt}') - _LOGGER.debug(f'[get_cost_data] total job time ({job_task_id}): {end_dt - start_dt}') - - self._update_keys(data_source_vo, tag_keys, additional_info_keys, data_keys) + _LOGGER.debug(f"[get_cost_data] end job ({job_task_id}): {end_dt}") + _LOGGER.debug( + f"[get_cost_data] total job time ({job_task_id}): {end_dt - start_dt}" + ) + + self._update_keys( + data_source_vo, tag_keys, additional_info_keys, data_keys + ) self.job_task_mgr.change_success_status(job_task_vo, count) except Exception as e: @@ -244,23 +297,32 @@ def create_cost_job(self, data_source_vo: DataSource, job_options): changed = [] data_source_id = data_source_vo.data_source_id + resource_group = data_source_vo.resource_group domain_id = data_source_vo.domain_id - endpoint = self.ds_plugin_mgr.get_data_source_plugin_endpoint_by_vo(data_source_vo) + workspace_id = "*" + if resource_group == "WORKSPACE": + workspace_id = data_source_vo.workspace_id + + endpoint = self.ds_plugin_mgr.get_data_source_plugin_endpoint_by_vo( + data_source_vo + ) options = 
data_source_vo.plugin_info.options schema = data_source_vo.plugin_info.schema if data_source_vo.secret_type: secret_type = data_source_vo.secret_type else: - secret_type = 'MANUAL' + secret_type = "MANUAL" - options.update({'secret_type': secret_type}) - secret_ids = self._list_secret_ids_from_secret_type(data_source_vo, secret_type, domain_id) + options.update({"secret_type": secret_type}) + secret_ids = self._list_secret_ids_from_secret_type( + data_source_vo, secret_type, domain_id + ) self.ds_plugin_mgr.initialize(endpoint) params = { - 'last_synchronized_at': data_source_vo.last_synchronized_at, - 'start': job_options.get('start') + "last_synchronized_at": data_source_vo.last_synchronized_at, + "start": job_options.get("start"), } start, last_synchronized_at = self._get_start_last_synchronized_at(params) @@ -268,64 +330,91 @@ def create_cost_job(self, data_source_vo: DataSource, job_options): for secret_id in secret_ids: try: secret_data = self._get_secret_data(secret_id, domain_id) - single_tasks, single_changed = self.ds_plugin_mgr.get_tasks(options, secret_id, secret_data, schema, - start, last_synchronized_at, domain_id) + single_tasks, single_changed = self.ds_plugin_mgr.get_tasks( + options, + secret_id, + secret_data, + schema, + start, + last_synchronized_at, + domain_id, + ) tasks.extend(single_tasks) changed.extend(single_changed) except Exception as e: - _LOGGER.error(f'[create_cost_job] get_tasks error: {e}', exc_info=True) + _LOGGER.error(f"[create_cost_job] get_tasks error: {e}", exc_info=True) - if secret_type == 'MANUAL': - raise ERROR_GET_JOB_TASKS(secret_id=secret_id, data_source_id=data_source_id, reason=e) + if secret_type == "MANUAL": + raise ERROR_GET_JOB_TASKS( + secret_id=secret_id, data_source_id=data_source_id, reason=e + ) for task in tasks: _LOGGER.debug(f'[sync] task options: {task["task_options"]}') - _LOGGER.debug(f'[sync] changed: {changed}') + _LOGGER.debug(f"[sync] changed: {changed}") # Add Job Options - job_vo = 
self.job_mgr.create_job(data_source_id, domain_id, job_options, len(tasks), changed) + job_vo = self.job_mgr.create_job( + resource_group, + data_source_id, + workspace_id, + domain_id, + job_options, + len(tasks), + changed, + ) if self._check_duplicate_job(data_source_id, domain_id, job_vo): - self.job_mgr.change_error_status(job_vo, ERROR_DUPLICATE_JOB(data_source_id=data_source_id)) + self.job_mgr.change_error_status( + job_vo, ERROR_DUPLICATE_JOB(data_source_id=data_source_id) + ) else: if len(tasks) > 0: for task in tasks: job_task_vo = None - task_options = task['task_options'] + task_options = task["task_options"] try: - job_task_vo = self.job_task_mgr.create_job_task(job_vo.job_id, data_source_id, domain_id, - task_options) - self.job_task_mgr.push_job_task({ - 'task_options': task_options, - 'secret_id': task.get('secret_id'), - 'secret_data': task.get('secret_data', {}), - 'job_task_id': job_task_vo.job_task_id, - 'domain_id': domain_id - }) + job_task_vo = self.job_task_mgr.create_job_task( + job_vo.job_id, data_source_id, domain_id, task_options + ) + self.job_task_mgr.push_job_task( + { + "task_options": task_options, + "secret_id": task.get("secret_id"), + "secret_data": task.get("secret_data", {}), + "job_task_id": job_task_vo.job_task_id, + "domain_id": domain_id, + } + ) except Exception as e: if job_task_vo: - self.job_task_mgr.change_error_status(job_task_vo, e, secret_type) + self.job_task_mgr.change_error_status( + job_task_vo, e, secret_type + ) else: job_vo = self.job_mgr.change_success_status(job_vo) - self.data_source_mgr.update_data_source_by_vo({'last_synchronized_at': job_vo.created_at}, - data_source_vo) + self.data_source_mgr.update_data_source_by_vo( + {"last_synchronized_at": job_vo.created_at}, data_source_vo + ) return job_vo def _list_secret_ids_from_secret_type(self, data_source_vo, secret_type, domain_id): secret_ids = [] - if secret_type == 'MANUAL': + if secret_type == "MANUAL": secret_ids = 
[data_source_vo.plugin_info.secret_id] - elif secret_type == 'USE_SERVICE_ACCOUNT_SECRET': + elif secret_type == "USE_SERVICE_ACCOUNT_SECRET": secret_filter = {} provider = data_source_vo.provider if data_source_vo.secret_filter: secret_filter = data_source_vo.secret_filter.to_dict() - secret_ids = self._list_secret_ids_from_secret_filter(secret_filter, provider, domain_id) + secret_ids = self._list_secret_ids_from_secret_filter( + secret_filter, provider, domain_id + ) return secret_ids @@ -333,24 +422,39 @@ def _list_secret_ids_from_secret_filter(self, secret_filter, provider, domain_id secret_manager: SecretManager = self.locator.get_manager(SecretManager) _filter = self._set_secret_filter(secret_filter, provider) - query = {'filter': _filter} if _filter else {} + query = {"filter": _filter} if _filter else {} response = secret_manager.list_secrets(query, domain_id) - return [secret_info.get('secret_id') for secret_info in response.get('results', [])] + return [ + secret_info.get("secret_id") for secret_info in response.get("results", []) + ] @staticmethod def _set_secret_filter(secret_filter, provider): _filter = [] if provider: - _filter.append({'k': 'provider', 'v': provider, 'o': 'eq'}) - - if secret_filter and secret_filter.get('state') == 'ENABLED': - if 'secrets' in secret_filter and secret_filter['secrets']: - _filter.append({'k': 'secret_id', 'v': secret_filter['secrets'], 'o': 'in'}) - if 'service_accounts' in secret_filter and secret_filter['service_accounts']: - _filter.append({'k': 'service_account_id', 'v': secret_filter['service_accounts'], 'o': 'in'}) - if 'schemas' in secret_filter and secret_filter['schemas']: - _filter.append({'k': 'schema', 'v': secret_filter['schemas'], 'o': 'in'}) + _filter.append({"k": "provider", "v": provider, "o": "eq"}) + + if secret_filter and secret_filter.get("state") == "ENABLED": + if "secrets" in secret_filter and secret_filter["secrets"]: + _filter.append( + {"k": "secret_id", "v": secret_filter["secrets"], "o": 
"in"} + ) + if ( + "service_accounts" in secret_filter + and secret_filter["service_accounts"] + ): + _filter.append( + { + "k": "service_account_id", + "v": secret_filter["service_accounts"], + "o": "in", + } + ) + if "schemas" in secret_filter and secret_filter["schemas"]: + _filter.append( + {"k": "schema", "v": secret_filter["schemas"], "o": "in"} + ) return _filter @@ -361,19 +465,19 @@ def _get_service_account_id_and_project_id(self, secret_id, domain_id): secret_mgr: SecretManager = self.locator.get_manager(SecretManager) if secret_id: - _query = {'filter': [{'k': 'secret_id', 'v': secret_id, 'o': 'eq'}]} + _query = {"filter": [{"k": "secret_id", "v": secret_id, "o": "eq"}]} response = secret_mgr.list_secrets(_query, domain_id) - results = response.get('results', []) + results = response.get("results", []) if results: secret_info = results[0] - service_account_id = secret_info.get('service_account_id') - project_id = secret_info.get('project_id') + service_account_id = secret_info.get("service_account_id") + project_id = secret_info.get("project_id") return service_account_id, project_id @staticmethod def _append_tag_keys(tags_keys, cost_data): - cost_tags = cost_data.get('tags') or {} + cost_tags = cost_data.get("tags") or {} for key in cost_tags.keys(): if key not in tags_keys: @@ -382,7 +486,7 @@ def _append_tag_keys(tags_keys, cost_data): @staticmethod def _append_additional_info_keys(additional_info_keys, cost_data): - cost_additional_info = cost_data.get('additional_info') or {} + cost_additional_info = cost_data.get("additional_info") or {} for key in cost_additional_info.keys(): if key not in additional_info_keys: @@ -391,7 +495,7 @@ def _append_additional_info_keys(additional_info_keys, cost_data): @staticmethod def _append_data_keys(data_keys, cost_data): - cost_data_info = cost_data.get('data') or {} + cost_data_info = cost_data.get("data") or {} for key in cost_data_info.keys(): if key not in data_keys: @@ -399,7 +503,7 @@ def 
_append_data_keys(data_keys, cost_data): return data_keys def _get_secret_data(self, secret_id, domain_id): - secret_mgr: SecretManager = self.locator.get_manager('SecretManager') + secret_mgr: SecretManager = self.locator.get_manager("SecretManager") if secret_id: secret_data = secret_mgr.get_secret_data(secret_id, domain_id) else: @@ -409,64 +513,76 @@ def _get_secret_data(self, secret_id, domain_id): @staticmethod def _check_cost_data(cost_data): - if 'billed_date' not in cost_data: - _LOGGER.error(f'[_check_cost_data] cost_data: {cost_data}') - raise ERROR_REQUIRED_PARAMETER(key='plugin_cost_data.billed_date') + if "billed_date" not in cost_data: + _LOGGER.error(f"[_check_cost_data] cost_data: {cost_data}") + raise ERROR_REQUIRED_PARAMETER(key="plugin_cost_data.billed_date") def _create_cost_data(self, cost_data, job_task_vo, cost_options): - cost_data['cost'] = cost_data.get('cost', 0) - cost_data['job_id'] = job_task_vo.job_id - cost_data['job_task_id'] = job_task_vo.job_task_id - cost_data['data_source_id'] = job_task_vo.data_source_id - cost_data['domain_id'] = job_task_vo.domain_id - cost_data['billed_date'] = cost_data['billed_date'] + cost_data["cost"] = cost_data.get("cost", 0) + cost_data["job_id"] = job_task_vo.job_id + cost_data["job_task_id"] = job_task_vo.job_task_id + cost_data["data_source_id"] = job_task_vo.data_source_id + cost_data["domain_id"] = job_task_vo.domain_id + cost_data["billed_date"] = cost_data["billed_date"] - if 'service_account_id' in cost_options: - cost_data['service_account_id'] = cost_options['service_account_id'] + if "service_account_id" in cost_options: + cost_data["service_account_id"] = cost_options["service_account_id"] - if 'project_id' in cost_options: - cost_data['project_id'] = cost_options['project_id'] + if "project_id" in cost_options: + cost_data["project_id"] = cost_options["project_id"] self.cost_mgr.create_cost(cost_data, execute_rollback=False) - def _is_job_canceled(self, job_id, domain_id): + def 
_is_job_failed(self, job_id, domain_id): job_vo: Job = self.job_mgr.get_job(job_id, domain_id) - if job_vo.status == 'CANCELED': + if job_vo.status in ["CANCELED", "FAILURE"]: return True else: return False def _close_job(self, job_id, domain_id, data_source_id): job_vo: Job = self.job_mgr.get_job(job_id, domain_id) - no_preload_cache = job_vo.options.get('no_preload_cache', False) + no_preload_cache = job_vo.options.get("no_preload_cache", False) if job_vo.remained_tasks == 0: - if job_vo.status == 'IN_PROGRESS': + if job_vo.status == "IN_PROGRESS": try: self._aggregate_cost_data(job_vo) for changed_vo in job_vo.changed: - self._delete_changed_cost_data(job_vo, changed_vo.start, changed_vo.end, changed_vo.filter) + self._delete_changed_cost_data( + job_vo, changed_vo.start, changed_vo.end, changed_vo.filter + ) except Exception as e: - _LOGGER.error(f'[_close_job] aggregate cost data error: {e}', exc_info=True) + _LOGGER.error( + f"[_close_job] aggregate cost data error: {e}", exc_info=True + ) self._rollback_cost_data(job_vo) - self.job_mgr.change_error_status(job_vo, f'aggregate cost data error: {e}') + self.job_mgr.change_error_status( + job_vo, f"aggregate cost data error: {e}" + ) raise e try: self._delete_old_cost_data(domain_id, data_source_id) except Exception as e: - _LOGGER.error(f'[_close_job] delete old cost data error: {e}', exc_info=True) - self.job_mgr.change_error_status(job_vo, f'delete old cost data error: {e}') + _LOGGER.error( + f"[_close_job] delete old cost data error: {e}", exc_info=True + ) + self.job_mgr.change_error_status( + job_vo, f"delete old cost data error: {e}" + ) raise e try: self.cost_mgr.remove_stat_cache(domain_id, data_source_id) if not no_preload_cache: - self.job_mgr.preload_cost_stat_queries(domain_id, data_source_id) + self.job_mgr.preload_cost_stat_queries( + domain_id, data_source_id + ) self.budget_usage_mgr.update_budget_usage(domain_id, data_source_id) @@ -474,96 +590,132 @@ def _close_job(self, job_id, domain_id, 
data_source_id): self.job_mgr.change_success_status(job_vo) except Exception as e: - _LOGGER.error(f'[_close_job] cache and budget update error: {e}', exc_info=True) - self.job_mgr.change_error_status(job_vo, f'cache and budget update error: {e}') + _LOGGER.error( + f"[_close_job] cache and budget update error: {e}", + exc_info=True, + ) + self.job_mgr.change_error_status( + job_vo, f"cache and budget update error: {e}" + ) raise e - elif job_vo.status == 'ERROR': + elif job_vo.status == "ERROR": self._rollback_cost_data(job_vo) - self.job_mgr.update_job_by_vo({'finished_at': datetime.utcnow()}, job_vo) + self.job_mgr.update_job_by_vo( + {"finished_at": datetime.utcnow()}, job_vo + ) - elif job_vo.status == 'CANCELED': + elif job_vo.status == "CANCELED": self._rollback_cost_data(job_vo) def _update_keys(self, data_source_vo, tag_keys, additional_info_keys, data_keys): - self.data_source_mgr.update_data_source_by_vo({ - 'cost_tag_keys': tag_keys, - 'cost_additional_info_keys': additional_info_keys, - 'cost_data_keys': data_keys - }, data_source_vo) + self.data_source_mgr.update_data_source_by_vo( + { + "cost_tag_keys": tag_keys, + "cost_additional_info_keys": additional_info_keys, + "cost_data_keys": data_keys, + }, + data_source_vo, + ) def _rollback_cost_data(self, job_vo: Job): - cost_vos = self.cost_mgr.filter_costs(data_source_id=job_vo.data_source_id, domain_id=job_vo.domain_id, - job_id=job_vo.job_id) + cost_vos = self.cost_mgr.filter_costs( + data_source_id=job_vo.data_source_id, + domain_id=job_vo.domain_id, + job_id=job_vo.job_id, + ) - _LOGGER.debug(f'[_close_job] delete cost data created by job: {job_vo.job_id} (count = {cost_vos.count()})') + _LOGGER.debug( + f"[_close_job] delete cost data created by job: {job_vo.job_id} (count = {cost_vos.count()})" + ) cost_vos.delete() - monthly_cost_vos = self.cost_mgr.filter_monthly_costs(data_source_id=job_vo.data_source_id, - domain_id=job_vo.domain_id, job_id=job_vo.job_id) + monthly_cost_vos = 
self.cost_mgr.filter_monthly_costs( + data_source_id=job_vo.data_source_id, + domain_id=job_vo.domain_id, + job_id=job_vo.job_id, + ) - _LOGGER.debug(f'[_close_job] delete monthly cost data created by job: {job_vo.job_id} (count = {cost_vos.count()})') + _LOGGER.debug( + f"[_close_job] delete monthly cost data created by job: {job_vo.job_id} (count = {cost_vos.count()})" + ) monthly_cost_vos.delete() def _update_last_sync_time(self, job_vo: Job): - self.data_source_mgr: DataSourceManager = self.locator.get_manager('DataSourceManager') - data_source_vo = self.data_source_mgr.get_data_source(job_vo.data_source_id, job_vo.domain_id) - self.data_source_mgr.update_data_source_by_vo({'last_synchronized_at': job_vo.created_at}, data_source_vo) + self.data_source_mgr: DataSourceManager = self.locator.get_manager( + "DataSourceManager" + ) + data_source_vo = self.data_source_mgr.get_data_source( + job_vo.data_source_id, job_vo.domain_id + ) + self.data_source_mgr.update_data_source_by_vo( + {"last_synchronized_at": job_vo.created_at}, data_source_vo + ) def _delete_old_cost_data(self, data_source_id, domain_id): now = datetime.utcnow().date() - old_billed_month = (now - relativedelta(months=12)).strftime('%Y-%m') - old_billed_year = (now - relativedelta(months=36)).strftime('%Y') + old_billed_month = (now - relativedelta(months=12)).strftime("%Y-%m") + old_billed_year = (now - relativedelta(months=36)).strftime("%Y") cost_delete_query = { - 'filter': [ - {'k': 'billed_month', 'v': old_billed_month, 'o': 'lt'}, - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': domain_id, 'o': 'eq'} + "filter": [ + {"k": "billed_month", "v": old_billed_month, "o": "lt"}, + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, ] } cost_vos, total_count = self.cost_mgr.list_costs(cost_delete_query) - _LOGGER.debug(f'[_delete_old_cost_data] delete costs (count = {total_count})') + 
_LOGGER.debug(f"[_delete_old_cost_data] delete costs (count = {total_count})") cost_vos.delete() monthly_cost_delete_query = { - 'filter': [ - {'k': 'billed_year', 'v': old_billed_year, 'o': 'lt'}, - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': domain_id, 'o': 'eq'} + "filter": [ + {"k": "billed_year", "v": old_billed_year, "o": "lt"}, + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, ] } - monthly_cost_vos, total_count = self.cost_mgr.list_monthly_costs(monthly_cost_delete_query) - _LOGGER.debug(f'[_delete_old_cost_data] delete monthly costs (count = {total_count})') + monthly_cost_vos, total_count = self.cost_mgr.list_monthly_costs( + monthly_cost_delete_query + ) + _LOGGER.debug( + f"[_delete_old_cost_data] delete monthly costs (count = {total_count})" + ) monthly_cost_vos.delete() def _delete_changed_cost_data(self, job_vo: Job, start, end, change_filter): query = { - 'filter': [ - {'k': 'billed_month', 'v': start, 'o': 'gte'}, - {'k': 'data_source_id', 'v': job_vo.data_source_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': job_vo.domain_id, 'o': 'eq'}, - {'k': 'job_id', 'v': job_vo.job_id, 'o': 'not'}, + "filter": [ + {"k": "billed_month", "v": start, "o": "gte"}, + {"k": "data_source_id", "v": job_vo.data_source_id, "o": "eq"}, + {"k": "domain_id", "v": job_vo.domain_id, "o": "eq"}, + {"k": "job_id", "v": job_vo.job_id, "o": "not"}, ] } if end: - query['filter'].append({'k': 'billed_month', 'v': end, 'o': 'lte'}) + query["filter"].append({"k": "billed_month", "v": end, "o": "lte"}) for key, value in change_filter.items(): - query['filter'].append({'k': key, 'v': value, 'o': 'eq'}) + query["filter"].append({"k": key, "v": value, "o": "eq"}) - _LOGGER.debug(f'[_delete_changed_cost_data] query: {query}') + _LOGGER.debug(f"[_delete_changed_cost_data] query: {query}") cost_vos, total_count = self.cost_mgr.list_costs(copy.deepcopy(query)) cost_vos.delete() - 
_LOGGER.debug(f'[_delete_changed_cost_data] delete costs (count = {total_count})') + _LOGGER.debug( + f"[_delete_changed_cost_data] delete costs (count = {total_count})" + ) - monthly_cost_vos, total_count = self.cost_mgr.list_monthly_costs(copy.deepcopy(query)) + monthly_cost_vos, total_count = self.cost_mgr.list_monthly_costs( + copy.deepcopy(query) + ) monthly_cost_vos.delete() - _LOGGER.debug(f'[_delete_changed_cost_data] delete monthly costs (count = {total_count})') + _LOGGER.debug( + f"[_delete_changed_cost_data] delete monthly costs (count = {total_count})" + ) def _aggregate_cost_data(self, job_vo: Job): data_source_id = job_vo.data_source_id @@ -572,89 +724,92 @@ def _aggregate_cost_data(self, job_vo: Job): job_task_ids = self._get_job_task_ids(job_id, domain_id) for job_task_id in job_task_ids: - for billed_month in self._distinct_billed_month(data_source_id, domain_id, job_id, job_task_id): - self._aggregate_monthly_cost_data(data_source_id, domain_id, job_id, job_task_id, billed_month) + for billed_month in self._distinct_billed_month( + data_source_id, domain_id, job_id, job_task_id + ): + self._aggregate_monthly_cost_data( + data_source_id, domain_id, job_id, job_task_id, billed_month + ) def _distinct_billed_month(self, data_source_id, domain_id, job_id, job_task_id): query = { - 'distinct': 'billed_month', - 'filter': [ - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': domain_id, 'o': 'eq'}, - {'k': 'job_id', 'v': job_id, 'o': 'eq'}, - {'k': 'job_task_id', 'v': job_task_id, 'o': 'eq'}, + "distinct": "billed_month", + "filter": [ + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, + {"k": "job_id", "v": job_id, "o": "eq"}, + {"k": "job_task_id", "v": job_task_id, "o": "eq"}, ], - 'target': 'PRIMARY' # Execute a query to primary DB + "target": "PRIMARY", # Execute a query to primary DB } - _LOGGER.debug(f'[_distinct_cost_data] query: {query}') + 
_LOGGER.debug(f"[_distinct_cost_data] query: {query}") response = self.cost_mgr.stat_costs(query) - values = response.get('results', []) + values = response.get("results", []) - _LOGGER.debug(f'[_distinct_cost_data] billed_month: {values}') + _LOGGER.debug(f"[_distinct_cost_data] billed_month: {values}") return values - def _aggregate_monthly_cost_data(self, data_source_id, domain_id, job_id, job_task_id, billed_month): + def _aggregate_monthly_cost_data( + self, data_source_id, domain_id, job_id, job_task_id, billed_month + ): query = { - 'group_by': [ - 'usage_unit', - 'provider', - 'region_code', - 'region_key', - 'product', - 'usage_type', - 'resource', - 'tags', - 'additional_info', - 'service_account_id', - 'project_id', - 'project_group_id', - 'billed_year' + "group_by": [ + "usage_unit", + "provider", + "region_code", + "region_key", + "product", + "usage_type", + "resource", + "tags", + "additional_info", + "service_account_id", + "project_id", + "project_group_id", + "billed_year", ], - 'fields': { - 'cost': { - 'key': 'cost', - 'operator': 'sum' - }, - 'usage_quantity': { - 'key': 'usage_quantity', - 'operator': 'sum' - } + "fields": { + "cost": {"key": "cost", "operator": "sum"}, + "usage_quantity": {"key": "usage_quantity", "operator": "sum"}, }, - 'start': billed_month, - 'end': billed_month, - 'filter': [ - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'job_id', 'v': job_id, 'o': 'eq'}, - {'k': 'job_task_id', 'v': job_task_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': domain_id, 'o': 'eq'}, + "start": billed_month, + "end": billed_month, + "filter": [ + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "job_id", "v": job_id, "o": "eq"}, + {"k": "job_task_id", "v": job_task_id, "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, ], - 'allow_disk_use': True # Allow disk use for large data + "allow_disk_use": True, # Allow disk use for large data } - response = self.cost_mgr.analyze_costs(query, target='PRIMARY') - 
results = response.get('results', []) + response = self.cost_mgr.analyze_costs(query, target="PRIMARY") + results = response.get("results", []) for aggregated_cost_data in results: - aggregated_cost_data['data_source_id'] = data_source_id - aggregated_cost_data['billed_month'] = billed_month - aggregated_cost_data['job_id'] = job_id - aggregated_cost_data['job_task_id'] = job_id - aggregated_cost_data['domain_id'] = domain_id + aggregated_cost_data["data_source_id"] = data_source_id + aggregated_cost_data["billed_month"] = billed_month + aggregated_cost_data["job_id"] = job_id + aggregated_cost_data["job_task_id"] = job_task_id + aggregated_cost_data["domain_id"] = domain_id self.cost_mgr.create_monthly_cost(aggregated_cost_data) _LOGGER.debug( - f'[_aggregate_monthly_cost_data] create monthly costs ({billed_month}): {job_id} (count = {len(results)})') + f"[_aggregate_monthly_cost_data] create monthly costs ({billed_month}): {job_id} (count = {len(results)})" + ) def _get_all_data_sources(self): - return self.data_source_mgr.filter_data_sources(state='ENABLED', data_source_type='EXTERNAL') + return self.data_source_mgr.filter_data_sources( + state="ENABLED", data_source_type="EXTERNAL" + ) def _check_duplicate_job(self, data_source_id, domain_id, this_job_vo: Job): query = { - 'filter': [ - {'k': 'data_source_id', 'v': data_source_id, 'o': 'eq'}, - {'k': 'domain_id', 'v': domain_id, 'o': 'eq'}, - {'k': 'status', 'v': 'IN_PROGRESS', 'o': 'eq'}, - {'k': 'job_id', 'v': this_job_vo.job_id, 'o': 'not'}, + "filter": [ + {"k": "data_source_id", "v": data_source_id, "o": "eq"}, + {"k": "domain_id", "v": domain_id, "o": "eq"}, + {"k": "status", "v": "IN_PROGRESS", "o": "eq"}, + {"k": "job_id", "v": this_job_vo.job_id, "o": "not"}, ] } @@ -672,13 +827,17 @@ def _check_duplicate_job(self, data_source_id, domain_id, this_job_vo: Job): @staticmethod def _get_start_last_synchronized_at(params): - start = params.get('start') - last_synchronized_at = 
utils.datetime_to_iso8601(params.get('last_synchronized_at')) + start = params.get("start") + last_synchronized_at = utils.datetime_to_iso8601( + params.get("last_synchronized_at") + ) return start, last_synchronized_at def _get_job_task_ids(self, job_id, domain_id): job_task_ids = [] - job_task_vos = self.job_task_mgr.filter_job_tasks(job_id=job_id, domain_id=domain_id) + job_task_vos = self.job_task_mgr.filter_job_tasks( + job_id=job_id, domain_id=domain_id + ) for job_task_vo in job_task_vos: job_task_ids.append(job_task_vo.job_task_id) diff --git a/src/spaceone/cost_analysis/service/job_task_service.py b/src/spaceone/cost_analysis/service/job_task_service.py index 8830a033..88461667 100644 --- a/src/spaceone/cost_analysis/service/job_task_service.py +++ b/src/spaceone/cost_analysis/service/job_task_service.py @@ -11,38 +11,46 @@ @mutation_handler @event_handler class JobTaskService(BaseService): - def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.job_task_mgr: JobTaskManager = self.locator.get_manager('JobTaskManager') + self.job_task_mgr: JobTaskManager = self.locator.get_manager("JobTaskManager") - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['job_task_id', 'domain_id']) + @transaction( + permission="cost-analysis:JobTask.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @check_required(["job_task_id", "domain_id"]) def get(self, params): - """ Get job_task + """Get job_task Args: params (dict): { 'job_task_id': 'str', 'domain_id': 'str', - 'only': 'list } Returns: job_task_vo (object) """ - job_task_id = params['job_task_id'] - domain_id = params['domain_id'] - - return self.job_task_mgr.get_job_task(job_task_id, domain_id, params.get('only')) - - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['domain_id']) - @append_query_filter(['job_task_id', 'status', 'job_id', 'data_source_id', 'domain_id']) - 
@append_keyword_filter(['job_task_id']) + job_task_id = params["job_task_id"] + workspace_id = params.get("workspace_id") + domain_id = params["domain_id"] + + return self.job_task_mgr.get_job_task(job_task_id, domain_id, workspace_id) + + @transaction( + permission="cost-analysis:JobTask.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["domain_id"]) + @append_query_filter( + ["job_task_id", "status", "job_id", "data_source_id", "domain_id"] + ) + @append_keyword_filter(["job_task_id"]) def list(self, params): - """ List job_tasks + """List job_tasks Args: params (dict): { @@ -50,6 +58,7 @@ def list(self, params): 'status': 'str', 'job_id': 'str', 'data_source_id': 'str', + 'workspace_id': 'list', 'domain_id': 'str', 'query': 'dict (spaceone.api.core.v1.Query)' } @@ -59,13 +68,17 @@ def list(self, params): total_count """ - query = params.get('query', {}) + query = params.get("query", {}) return self.job_task_mgr.list_job_tasks(query) - @transaction(append_meta={'authorization.scope': 'DOMAIN'}) - @check_required(['query', 'domain_id']) - @append_query_filter(['domain_id']) - @append_keyword_filter(['job_task_id']) + @transaction( + permission="cost-analysis:JobTask.read", + role_types=["DOMAIN_ADMIN", "WORKSPACE_OWNER", "WORKSPACE_MEMBER"], + ) + @change_value_by_rule("APPEND", "workspace_id", "*") + @check_required(["query", "domain_id"]) + @append_query_filter(["domain_id"]) + @append_keyword_filter(["job_task_id"]) def stat(self, params): """ Args: @@ -79,5 +92,5 @@ def stat(self, params): """ - query = params.get('query', {}) + query = params.get("query", {}) return self.job_task_mgr.stat_job_tasks(query) From 68d29d8397b630b2835fdae1c0836d979c28c6fd Mon Sep 17 00:00:00 2001 From: ImMin5 Date: Mon, 18 Dec 2023 02:40:53 +0900 Subject: [PATCH 3/4] feat: remove project_group_id at budget, budget_usage model --- src/spaceone/cost_analysis/model/budget_model.py 
| 7 +------ src/spaceone/cost_analysis/model/budget_usage_model.py | 6 +----- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/src/spaceone/cost_analysis/model/budget_model.py b/src/spaceone/cost_analysis/model/budget_model.py index b2ce924c..169fe959 100644 --- a/src/spaceone/cost_analysis/model/budget_model.py +++ b/src/spaceone/cost_analysis/model/budget_model.py @@ -65,18 +65,13 @@ class Budget(MongoModel): "limit", "provider_filter", "project_id", - "project_group_id", "data_source_id", ], - "change_query_keys": { - "user_projects": "project_id", - "user_project_groups": "project_group_id", - }, + "change_query_keys": {"user_projects": "project_id"}, "ordering": ["name"], "indexes": [ "name", "project_id", - "project_group_id", "data_source_id", "domain_id", ], diff --git a/src/spaceone/cost_analysis/model/budget_usage_model.py b/src/spaceone/cost_analysis/model/budget_usage_model.py index b03dedad..1305b0e7 100644 --- a/src/spaceone/cost_analysis/model/budget_usage_model.py +++ b/src/spaceone/cost_analysis/model/budget_usage_model.py @@ -32,17 +32,13 @@ class BudgetUsage(MongoModel): meta = { "updatable_fields": ["name", "cost", "limit"], "minimal_fields": ["budget_id", "name", "date", "usd_cost", "limit"], - "change_query_keys": { - "user_projects": "project_id", - "user_project_groups": "project_group_id", - }, + "change_query_keys": {"user_projects": "project_id"}, "ordering": ["budget_id", "date"], "indexes": [ "budget_id", "name", "date", "project_id", - "project_group_id", "data_source_id", "domain_id", ], From dfaab4f5fcde2ef6946891d824553d7931d73319 Mon Sep 17 00:00:00 2001 From: ImMin5 Date: Mon, 18 Dec 2023 02:42:24 +0900 Subject: [PATCH 4/4] feat: add Handler config example --- .../cost_analysis/conf/global_conf.py | 58 +++++++++++-------- 1 file changed, 33 insertions(+), 25 deletions(-) diff --git a/src/spaceone/cost_analysis/conf/global_conf.py b/src/spaceone/cost_analysis/conf/global_conf.py index 87c9639f..d45ba6e5 100644 --- 
a/src/spaceone/cost_analysis/conf/global_conf.py +++ b/src/spaceone/cost_analysis/conf/global_conf.py @@ -1,40 +1,48 @@ DATABASE_AUTO_CREATE_INDEX = True DATABASES = { - 'default': { - 'db': 'cost-analysis', - 'host': 'localhost', - 'port': 27017, - 'username': '', - 'password': '' + "default": { + "db": "cost-analysis", + "host": "localhost", + "port": 27017, + "username": "", + "password": "", } } CACHES = { - 'default': {}, - 'local': { - 'backend': 'spaceone.core.cache.local_cache.LocalCache', - 'max_size': 128, - 'ttl': 300 - } + "default": {}, + "local": { + "backend": "spaceone.core.cache.local_cache.LocalCache", + "max_size": 128, + "ttl": 300, + }, } HANDLERS = { + # "authentication": [{ + # "backend": "spaceone.core.handler.authentication_handler:SpaceONEAuthenticationHandler" + # }], + # "authorization": [{ + # "backend": "spaceone.core.handler.authorization_handler:SpaceONEAuthorizationHandler" + # }], + # "mutation": [{ + # "backend": "spaceone.core.handler.mutation_handler:SpaceONEMutationHandler" + # }], + # "event": [] } CONNECTORS = { - 'SpaceConnector': { - 'backend': 'spaceone.core.connector.space_connector.SpaceConnector', - 'endpoints': { - 'identity': 'grpc://identity:50051', - 'plugin': 'grpc://plugin:50051', - 'repository': 'grpc://repository:50051', - 'secret': 'grpc://secret:50051', - 'notification': 'grpc://notification:50051' - - } - }, - 'DataSourcePluginConnector': { + "SpaceConnector": { + "backend": "spaceone.core.connector.space_connector.SpaceConnector", + "endpoints": { + "identity": "grpc://identity:50051", + "plugin": "grpc://plugin:50051", + "repository": "grpc://repository:50051", + "secret": "grpc://secret:50051", + "notification": "grpc://notification:50051", + }, }, + "DataSourcePluginConnector": {}, } # Scheduler Settings @@ -47,7 +55,7 @@ # Job Settings JOB_TIMEOUT = 600 DATA_SOURCE_SYNC_HOUR = 16 # Hour (UTC) -COST_QUERY_CACHE_TIME = 4 # Day +COST_QUERY_CACHE_TIME = 4 # Day DEFAULT_EXCHANGE_RATE = { # 'KRW': 1178.7,