Skip to content

Commit

Permalink
Fix pylint errors and reformat code
Browse files Browse the repository at this point in the history
Signed-off-by: Shashank Reddy Boyapally <[email protected]>
  • Loading branch information
shashank-boyapally committed Feb 20, 2024
1 parent d1ad8af commit 3de05ff
Showing 1 changed file with 64 additions and 45 deletions.
109 changes: 64 additions & 45 deletions utils/orion_funcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from hunter.series import Metric, Series


def run_hunter_analyze(merged_df, test, output):
    """Run hunter change-point analysis on the merged dataframe and print a report.

    Args:
        merged_df (pd.DataFrame): merged dataframe with "uuid", "timestamp"
            and one column per metric of interest
        test (dict): test configuration; must contain the test "name"
        output (str): report format, either "text" or "json"
    """
    # Hunter expects epoch-second integer timestamps.
    merged_df["timestamp"] = pd.to_datetime(merged_df["timestamp"])
    merged_df["timestamp"] = merged_df["timestamp"].astype(int) // 10**9
    metrics = {
        column: Metric(1, 1.0)
        for column in merged_df.columns
        if column not in ["uuid", "timestamp"]
    }
    data = {
        column: merged_df[column]
        for column in merged_df.columns
        if column not in ["uuid", "timestamp"]
    }
    attributes = {
        column: merged_df[column] for column in merged_df.columns if column in ["uuid"]
    }
    series = Series(
        test_name=test["name"],
        branch=None,
        time=list(merged_df["timestamp"]),
        metrics=metrics,
        data=data,
        attributes=attributes,
    )
    # Analyze once and reuse the result; the previous code called
    # series.analyze() up to three times (including a leftover debug print).
    analyzed = series.analyze()
    change_points = analyzed.change_points_by_time
    report = Report(series, change_points)
    if output == "text":
        output_table = report.produce_report(
            test_name="test", report_type=ReportType.LOG
        )
        print(output_table)
    elif output == "json":
        output_json = parse_json_output(merged_df, analyzed.change_points)
        print(json.dumps(output_json, indent=4))


def parse_json_output(merged_df, change_points_by_metric):
    """Build the JSON-serializable change-point report from the dataframe.

    Args:
        merged_df (pd.DataFrame): dataframe with "uuid", "timestamp" and one
            column per metric
        change_points_by_metric (dict): change points keyed by metric name;
            each change point exposes .index and .stats.mean_1 / .stats.mean_2

    Returns:
        list[dict]: one record per dataframe row; metric values are nested
        under "metrics" with their percentage change, and "is_changepoint"
        flags rows where any metric changed
    """
    df_json = json.loads(merged_df.to_json(orient="records"))

    # Move every non-identity column under a "metrics" sub-dict.
    # keys() - {...} materializes a set first, so pop() during the
    # comprehension is safe.
    for entry in df_json:
        entry["metrics"] = {
            key: {"value": entry.pop(key), "percentage_change": 0}
            for key in entry.keys() - {"uuid", "timestamp"}
        }
        entry["is_changepoint"] = False

    for key, change_point_list in change_points_by_metric.items():
        for change_point in change_point_list:
            index = change_point.index
            percentage_change = (
                (change_point.stats.mean_2 - change_point.stats.mean_1)
                / change_point.stats.mean_1
            ) * 100
            df_json[index]["metrics"][key]["percentage_change"] = percentage_change
            df_json[index]["is_changepoint"] = True

    return df_json


# pylint: disable=too-many-locals
def get_metric_data(ids, index, metrics, match, logger):
Expand All @@ -86,22 +109,18 @@ def get_metric_data(ids, index, metrics, match, logger):
"""
dataframe_list = []
for metric in metrics:
metric_name = metric['name']
metric_name = metric["name"]
logger.info("Collecting %s", metric_name)
metric_of_interest = metric['metric_of_interest']
metric_of_interest = metric["metric_of_interest"]

if "agg" in metric.keys():
try:
cpu = match.get_agg_metric_query(
ids, index, metric
)
agg_value = metric['agg']['value']
agg_type = metric['agg']['agg_type']
cpu = match.get_agg_metric_query(ids, index, metric)
agg_value = metric["agg"]["value"]
agg_type = metric["agg"]["agg_type"]
agg_name = agg_value + "_" + agg_type
cpu_df = match.convert_to_df(cpu, columns=["uuid", agg_name])
cpu_df = cpu_df.rename(
columns={agg_name: metric_name+ "_" + agg_name}
)
cpu_df = cpu_df.rename(columns={agg_name: metric_name + "_" + agg_name})
dataframe_list.append(cpu_df)
logger.debug(cpu_df)

Expand All @@ -128,7 +147,7 @@ def get_metric_data(ids, index, metrics, match, logger):
return dataframe_list


def get_metadata(test, logger):
    """Gets metadata of the run from each test

    Args:
        test (dict): test configuration containing a "metadata" mapping
        logger (logging.Logger): logger used for debug output

    Returns:
        dict: dictionary of the metadata
    """
    metadata = test["metadata"]
    # Force string form — presumably the config loader may parse the
    # version as a float; TODO confirm against the config source.
    metadata["ocpVersion"] = str(metadata["ocpVersion"])
    # Lazy %-formatting (and a separator) instead of eager concatenation.
    logger.debug("metadata %s", metadata)
    return metadata



def set_logging(level, logger):
"""sets log level and format
Expand All @@ -164,7 +182,8 @@ def set_logging(level, logger):
logger.addHandler(handler)
return logger

def load_config(config,logger):

def load_config(config, logger):
"""Loads config file
Args:
Expand Down

0 comments on commit 3de05ff

Please sign in to comment.