Commit

Merge pull request #58 from whdalsrnt/master
Implement Widget Load Function
whdalsrnt authored Jun 7, 2024
2 parents 28a4cde + 36f7bd2 commit 47962a2
Showing 9 changed files with 311 additions and 134 deletions.
4 changes: 4 additions & 0 deletions src/spaceone/dashboard/error/data_table.py
@@ -7,3 +7,7 @@ class ERROR_NOT_SUPPORTED_SOURCE_TYPE(ERROR_INVALID_ARGUMENT):
 
 class ERROR_QUERY_OPTION(ERROR_INVALID_ARGUMENT):
     _message = "Query option is invalid. (key = {key})"
+
+
+class ERROR_NOT_SUPPORTED_QUERY_OPTION(ERROR_INVALID_ARGUMENT):
+    _message = "Query option is not supported. (key = {key})"
99 changes: 94 additions & 5 deletions src/spaceone/dashboard/manager/data_table_manager/__init__.py
@@ -3,7 +3,10 @@
 import pandas as pd
 
 from spaceone.core.manager import BaseManager
-from spaceone.dashboard.error.data_table import ERROR_QUERY_OPTION
+from spaceone.dashboard.error.data_table import (
+    ERROR_QUERY_OPTION,
+    ERROR_NOT_SUPPORTED_QUERY_OPTION,
+)
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -17,17 +20,103 @@ def response(self, sort: list = None, page: dict = None) -> dict:
         total_count = len(self.df)
 
         if sort:
-            self._apply_sort(sort)
+            self.apply_sort(sort)
 
         if page:
-            self._apply_page(page)
+            self.apply_page(page)
 
         return {
             "results": self.df.to_dict(orient="records"),
             "total_count": total_count,
         }
 
-    def _apply_sort(self, sort: list) -> None:
+    def apply_filter(self, filter: list) -> None:
+        if len(self.df) > 0:
+            for condition in filter:
+                key = condition.get("key", condition.get("k"))
+                operator = condition.get("operator", condition.get("o"))
+                value = condition.get("value", condition.get("v"))
+
+                if operator in ["in", "not_in"]:
+                    if not isinstance(value, list):
+                        raise ERROR_QUERY_OPTION(key="filter")
+
+                if key and operator and value:
+                    try:
+                        if operator == "in":
+                            self.df = self.df[self.df[key].isin(value)]
+                        elif operator == "not_in":
+                            self.df = self.df[~self.df[key].isin(value)]
+                        elif operator == "eq":
+                            if isinstance(value, int) or isinstance(value, float):
+                                self.df = self.df.query(f"{key} == {value}")
+                            else:
+                                self.df = self.df.query(f"{key} == '{value}'")
+                        elif operator == "not":
+                            if isinstance(value, int) or isinstance(value, float):
+                                self.df = self.df.query(f"{key} != {value}")
+                            else:
+                                self.df = self.df.query(f"{key} != '{value}'")
+                        elif operator == "gt":
+                            if isinstance(value, int) or isinstance(value, float):
+                                self.df = self.df.query(f"{key} > {value}")
+                            else:
+                                self.df = self.df.query(f"{key} > '{value}'")
+                        elif operator == "gte":
+                            if isinstance(value, int) or isinstance(value, float):
+                                self.df = self.df.query(f"{key} >= {value}")
+                            else:
+                                self.df = self.df.query(f"{key} >= '{value}'")
+                        elif operator == "lt":
+                            if isinstance(value, int) or isinstance(value, float):
+                                self.df = self.df.query(f"{key} < {value}")
+                            else:
+                                self.df = self.df.query(f"{key} < '{value}'")
+                        elif operator == "lte":
+                            if isinstance(value, int) or isinstance(value, float):
+                                self.df = self.df.query(f"{key} <= {value}")
+                            else:
+                                self.df = self.df.query(f"{key} <= '{value}'")
+                        elif operator == "contain":
+                            self.df = self.df[self.df[key].str.contains(str(value))]
+                        elif operator == "not_contain":
+                            self.df = self.df[~self.df[key].str.contains(str(value))]
+                        else:
+                            raise ERROR_NOT_SUPPORTED_QUERY_OPTION(
+                                key=f"filter.operator.{operator}"
+                            )
+                    except Exception as e:
+                        raise ERROR_QUERY_OPTION(key="filter")
+                else:
+                    raise ERROR_QUERY_OPTION(key="filter")
+
+    def group_by(self, fields: dict, group_by: list = None) -> None:
+        if len(self.df) > 0:
+            columns = list(fields.keys())
+            if group_by:
+                columns.extend(group_by)
+
+            self.df = self.df[columns]
+
+            agg_options = {}
+            for name, options in fields.items():
+                operator = options.get("operator", "sum")
+                if operator not in ["sum", "average", "max", "min"]:
+                    raise ERROR_NOT_SUPPORTED_QUERY_OPTION(
+                        key=f"fields.operator.{operator}"
+                    )
+
+                if operator == "average":
+                    operator = "mean"
+
+                agg_options[name] = operator
+
+            if group_by:
+                self.df = self.df.groupby(group_by).agg(agg_options).reset_index()
+            else:
+                self.df = self.df.agg(agg_options).to_frame().T
+
+    def apply_sort(self, sort: list) -> None:
         if len(self.df) > 0:
             keys = []
             ascendings = []
@@ -46,7 +135,7 @@ def _apply_sort(self, sort: list) -> None:
                 _LOGGER.error(f"[_sort] Sort Error: {e}")
                 raise ERROR_QUERY_OPTION(key="sort")
 
-    def _apply_page(self, page: dict) -> None:
+    def apply_page(self, page: dict) -> None:
         if len(self.df) > 0:
             if limit := page.get("limit"):
                 if limit > 0:
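For orientation, here is a minimal standalone sketch of what the new apply_filter and group_by steps do to the backing DataFrame; the sample rows and column names are illustrative, not taken from the repository:

import pandas as pd

# Rows shaped like an analyze result (illustrative only).
df = pd.DataFrame(
    [
        {"project_id": "project-a", "provider": "aws", "cost": 10.0},
        {"project_id": "project-a", "provider": "google_cloud", "cost": 5.0},
        {"project_id": "project-b", "provider": "aws", "cost": 7.0},
    ]
)

# Roughly what apply_filter([{"k": "provider", "v": ["aws"], "o": "in"}]) does:
df = df[df["provider"].isin(["aws"])]

# Roughly what group_by(fields={"cost": {"operator": "sum"}}, group_by=["project_id"]) does:
df = df[["cost", "project_id"]]
df = df.groupby(["project_id"]).agg({"cost": "sum"}).reset_index()

print(df.to_dict(orient="records"))
# [{'project_id': 'project-a', 'cost': 10.0}, {'project_id': 'project-b', 'cost': 7.0}]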
152 changes: 134 additions & 18 deletions src/spaceone/dashboard/manager/data_table_manager/data_source_manager.py
@@ -1,4 +1,5 @@
 import logging
+import copy
 from typing import Literal, Tuple
 from datetime import datetime
 from dateutil.relativedelta import relativedelta
@@ -19,6 +20,105 @@ def __init__(self, *args, **kwargs):
         self.cost_analysis_mgr = CostAnalysisManager()
         self.inventory_mgr = InventoryManager()
 
+    @staticmethod
+    def get_data_and_labels_info(options: dict) -> Tuple[dict, dict]:
+        data_name = options.get("data_name")
+        data_unit = options.get("data_unit")
+        group_by = options.get("group_by")
+        date_format = options.get("date_format", "SINGLE")
+        additional_labels = options.get("additional_labels")
+
+        if data_name is None:
+            raise ERROR_REQUIRED_PARAMETER(key="options.data_name")
+
+        data_info = {data_name: {}}
+
+        if data_unit:
+            data_info[data_name]["unit"] = data_unit
+
+        labels_info = {}
+
+        if group_by:
+            for group_option in copy.deepcopy(group_by):
+                if isinstance(group_option, dict):
+                    group_name = group_option.get("name")
+                    group_key = group_option.get("key")
+                    if "." in group_key:
+                        group_key = group_key.split(".")[-1]
+
+                    name = group_name or group_key
+                    if name is None:
+                        raise ERROR_REQUIRED_PARAMETER(key="options.group_by.key")
+
+                    if group_name:
+                        del group_option["name"]
+
+                    if group_key:
+                        del group_option["key"]
+
+                    labels_info[name] = group_option
+                else:
+                    labels_info[group_option] = {}
+
+        if additional_labels:
+            for key in additional_labels.keys():
+                labels_info[key] = {}
+
+        if date_format == "SINGLE":
+            labels_info["Date"] = {}
+        else:
+            labels_info["Year"] = {}
+            labels_info["Month"] = {}
+            labels_info["Day"] = {}
+
+        return data_info, labels_info
+
+    def load_data_table_from_widget(
+        self, source_type: str, options: dict, query: dict, vars: dict = None
+    ) -> dict:
+        self._check_query(query)
+        granularity = query["granularity"]
+        start = query["start"]
+        end = query["end"]
+        fields = query.get("fields")
+        group_by = query.get("group_by")
+        filter = query.get("filter")
+        sort = query.get("sort")
+        page = query.get("page")
+
+        self.load_data_source(source_type, options, granularity, start, end, vars=vars)
+
+        if filter:
+            self.apply_filter(filter)
+
+        if fields:
+            self.group_by(fields, group_by)
+
+        return self.response(sort, page)
+
+    @staticmethod
+    def _check_query(query: dict) -> None:
+        if "granularity" not in query:
+            raise ERROR_REQUIRED_PARAMETER(key="query.granularity")
+
+        if "start" not in query:
+            raise ERROR_REQUIRED_PARAMETER(key="query.start")
+
+        if "end" not in query:
+            raise ERROR_REQUIRED_PARAMETER(key="query.end")
+
+        if "fields" not in query:
+            raise ERROR_REQUIRED_PARAMETER(key="query.fields")
+
+        if "select" in query:
+            raise ERROR_NOT_SUPPORTED_QUERY_OPTION(key="query.select")
+
+        if "field_group" in query:
+            raise ERROR_NOT_SUPPORTED_QUERY_OPTION(key="query.field_group")
+
+        if "filter_or" in query:
+            raise ERROR_NOT_SUPPORTED_QUERY_OPTION(key="query.filter_or")
+
     def load_data_source(
         self,
         source_type: str,
@@ -28,16 +128,17 @@ def load_data_source(
         end: str = None,
         sort: list = None,
         page: dict = None,
+        vars: dict = None,
     ) -> dict:
         start, end = self._get_time_from_granularity(granularity, start, end)
 
         if timediff := options.get("timediff"):
             start, end = self._change_time(start, end, timediff)
 
         if source_type == "COST":
-            self._analyze_cost(options, granularity, start, end)
+            self._analyze_cost(options, granularity, start, end, vars)
         elif source_type == "ASSET":
-            self._analyze_asset(options, granularity, start, end)
+            self._analyze_asset(options, granularity, start, end, vars)
         else:
             raise ERROR_NOT_SUPPORTED_SOURCE_TYPE(source_type=source_type)
@@ -56,6 +157,7 @@ def _analyze_asset(
         granularity: GRANULARITY,
         start: str,
         end: str,
+        vars: dict = None,
     ) -> None:
         asset_info = options.get("ASSET", {})
         metric_id = asset_info.get("metric_id")
@@ -75,15 +177,15 @@
             options.get("group_by"),
             options.get("filter"),
             options.get("filter_or"),
+            vars=vars,
         )
 
         params = {"metric_id": metric_id, "query": query}
 
         response = self.inventory_mgr.analyze_metric_data(params)
         results = response.get("results", [])
 
-        if date_format == "SEPARATE":
-            results = self._change_datetime_format(results)
+        results = self._change_datetime_format(results, date_format)
 
         self.df = pd.DataFrame(results)
 
@@ -93,6 +195,7 @@ def _analyze_cost(
         granularity: GRANULARITY,
         start: str,
         end: str,
+        vars: dict = None,
     ) -> None:
         cost_info = options.get("COST", {})
         data_source_id = cost_info.get("data_source_id")
@@ -115,34 +218,37 @@
             options.get("group_by"),
             options.get("filter"),
             options.get("filter_or"),
+            vars=vars,
         )
 
         params = {"data_source_id": data_source_id, "query": query}
 
         response = self.cost_analysis_mgr.analyze_cost(params)
         results = response.get("results", [])
 
-        if date_format == "SEPARATE":
-            results = self._change_datetime_format(results)
+        results = self._change_datetime_format(results, date_format)
 
         self.df = pd.DataFrame(results)
 
     @staticmethod
-    def _change_datetime_format(results: list) -> list:
+    def _change_datetime_format(results: list, date_format: str) -> list:
         changed_results = []
         for result in results:
             if date := result.get("date"):
-                if len(date) == 4:
-                    result["year"] = date
-                elif len(date) == 7:
-                    year, month = date.split("-")
-                    result["year"] = year
-                    result["month"] = month
-                elif len(date) == 10:
-                    year, month, day = date.split("-")
-                    result["year"] = year
-                    result["month"] = month
-                    result["day"] = day
+                if date_format == "SINGLE":
+                    result["Date"] = date
+                else:
+                    if len(date) == 4:
+                        result["Year"] = date
+                    elif len(date) == 7:
+                        year, month = date.split("-")
+                        result["Year"] = year
+                        result["Month"] = month
+                    elif len(date) == 10:
+                        year, month, day = date.split("-")
+                        result["Year"] = year
+                        result["Month"] = month
+                        result["Day"] = day
 
                 del result["date"]
                 changed_results.append(result)
@@ -234,7 +340,17 @@ def _make_query(
         group_by: list = None,
         filter: list = None,
         filter_or: list = None,
+        vars: dict = None,
     ):
+        if vars:
+            filter = filter or []
+            for key, value in vars.items():
+                if key in ["workspace_id", "project_id", "service_account_id"]:
+                    if isinstance(value, list):
+                        filter.append({"key": key, "value": value, "operator": "in"})
+                    else:
+                        filter.append({"key": key, "value": value, "operator": "eq"})
+
         return {
             "granularity": granularity,
             "start": start,
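Based on the checks in _check_query and the vars handling in _make_query, a query passed to load_data_table_from_widget is expected to look roughly like the sketch below; the concrete values, and the sort and page shapes, are assumptions for illustration:

# Illustrative query for load_data_table_from_widget. Field names follow the
# validation above; the values themselves are made-up examples.
query = {
    "granularity": "MONTHLY",   # required
    "start": "2024-01",         # required
    "end": "2024-06",           # required
    "fields": {                 # required; per-column aggregation, see group_by()
        "cost": {"operator": "sum"},
    },
    "group_by": ["project_id"],
    "filter": [{"k": "provider", "v": ["aws", "google_cloud"], "o": "in"}],
    "sort": [{"key": "cost", "desc": True}],   # assumed sort shape
    "page": {"start": 1, "limit": 10},         # assumed page shape
}

# Dashboard variables are merged into the filter by _make_query, e.g.
# vars = {"project_id": "project-abc"} becomes
# {"key": "project_id", "value": "project-abc", "operator": "eq"}.

# "select", "field_group", and "filter_or" are rejected with
# ERROR_NOT_SUPPORTED_QUERY_OPTION and must not appear in the query.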
1 change: 1 addition & 0 deletions src/spaceone/dashboard/model/private_widget/request.py
@@ -41,6 +41,7 @@ class PrivateWidgetDeleteRequest(BaseModel):
 
 class PrivateWidgetLoadRequest(BaseModel):
     widget_id: str
+    data_table_id: str
     query: dict
     vars: Union[dict, None] = None
     user_id: str
1 change: 1 addition & 0 deletions src/spaceone/dashboard/model/public_widget/request.py
@@ -51,6 +51,7 @@ class PublicWidgetGetRequest(BaseModel):
 
 class PublicWidgetLoadRequest(BaseModel):
     widget_id: str
+    data_table_id: str
     query: dict
     vars: Union[dict, None] = None
     workspace_id: Union[str, None] = None
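With data_table_id now required on both load request models, a widget load request carries the target data table alongside the query. A rough sketch of such a payload, with placeholder values and identity fields (user_id, workspace_id, domain_id) omitted for brevity:

# Placeholder request body for a widget load call after this change.
load_request = {
    "widget_id": "widget-12345678",
    "data_table_id": "dt-12345678",
    "query": {
        "granularity": "MONTHLY",
        "start": "2024-01",
        "end": "2024-06",
        "fields": {"cost": {"operator": "sum"}},
    },
    "vars": {"project_id": "project-abc"},
}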