Adding baseline and uuid options
Signed-off-by: Paige Rubendall <[email protected]>
paigerube14 committed Mar 6, 2024
1 parent 8014654 commit efd6876
Showing 4 changed files with 83 additions and 22 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/pylint.yml
@@ -22,4 +22,4 @@ jobs:
pip install .
- name: Analysing the code with pylint
run: |
pylint -d C0103 $(git ls-files '*.py')
pylint -d C0103 -d R0912 $(git ls-files '*/*.py' '*.py')
16 changes: 16 additions & 0 deletions README.md
@@ -97,3 +97,19 @@ Additionally, users can specify a custom path for the output CSV file using the
Orion's seamless integration with metadata and hunter ensures a robust regression detection tool for perf-scale CPT runs.


```--uuid``` : If you have a specific uuid in mind (maybe a current run), you can bypass the metadata configuration portion of the config file and use this parameter. You will still need to specify a config file with a metrics section listing the metrics you want to collect, both for the given uuid and for uuids whose metadata matches it.

```
tests :
- name : current-uuid-etcd-duration
metrics :
- name: etcdDisk
metricName : 99thEtcdDiskBackendCommitDurationSeconds
metric_of_interest: value
agg:
value: duration
agg_type: avg
```
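
For example, assuming Orion is invoked as ```python orion.py``` (adjust to your own entry point) and using the config above, a single-uuid run might look like the sketch below; the uuid value is a placeholder:

```
# Collect the configured metrics for one known uuid plus the runs whose
# metadata matches it, then run hunter analysis on the combined data.
python orion.py --uuid <known-run-uuid> --config config.yaml --output output.csv --hunter-analyze
```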

Orion provides flexibility if you already know the baseline uuids you want to compare against: use the ```--baseline``` flag. This flag should only be used in conjunction with ```--uuid```. As with the uuid option above, you will still need a metrics section in the config to specify the data points you want to collect.
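
For instance, under the same assumption about the entry point, a comparison of a current uuid against known baseline uuids (comma-separated) might look like:

```
# Compare the current run against two known baseline runs; the metrics
# section of the config decides which data points are pulled for each uuid.
python orion.py --uuid <current-run-uuid> --baseline <baseline-uuid-1>,<baseline-uuid-2> --config config.yaml --hunter-analyze
```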

58 changes: 38 additions & 20 deletions orion.py
@@ -11,8 +11,7 @@
import pandas as pd

from fmatch.matcher import Matcher
from utils.orion_funcs import run_hunter_analyze, get_metadata, \
set_logging, load_config, get_metric_data
from utils import orion_funcs


@click.group()
@@ -23,22 +22,28 @@ def cli():

# pylint: disable=too-many-locals
@click.command()
@click.option("--uuid", default="", help="UUID to use as base for comparisons")
@click.option("--baseline", default="", help="UUID to use as base for comparisons")
@click.option("--config", default="config.yaml", help="Path to the configuration file")
@click.option("--output", default="output.csv", help="Path to save the output csv file")
@click.option("--debug", is_flag=True, help="log level ")
@click.option("--hunter-analyze",is_flag=True, help="run hunter analyze")
def orion(config, debug, output,hunter_analyze):
def orion(**kwargs):
"""Orion is the cli tool to detect regressions over the runs
Args:
uuid (str): gather metrics based on uuid
baseline (str): baseline uuid to compare against uuid (uuid must be set when using baseline)
config (str): path to the config file
debug (bool): lets you log debug mode
output (str): path to the output csv file
hunter_analyze (bool): turns on hunter analysis of gathered uuid(s) data
"""
level = logging.DEBUG if debug else logging.INFO

level = logging.DEBUG if kwargs["debug"] else logging.INFO
logger = logging.getLogger("Orion")
logger = set_logging(level, logger)
data = load_config(config,logger)
logger = orion_funcs.set_logging(level, logger)
data = orion_funcs.load_config(kwargs["config"],logger)
ES_URL=None

if "ES_SERVER" in data.keys():
@@ -51,13 +56,23 @@ def orion(config, debug, output,hunter_analyze):
sys.exit(1)

for test in data["tests"]:
metadata = get_metadata(test, logger)
logger.info("The test %s has started", test["name"])
uuid = kwargs["uuid"]
baseline = kwargs["baseline"]
match = Matcher(index="perf_scale_ci", level=level, ES_URL=ES_URL)
uuids = match.get_uuid_by_metadata(metadata)
if len(uuids) == 0:
print("No UUID present for given metadata")
sys.exit()
if kwargs["uuid"] == "":
metadata = orion_funcs.get_metadata(test, logger)
else:
metadata = orion_funcs.get_uuid_metadata(uuid,match,logger)

logger.info("The test %s has started", test["name"])
if baseline == "":
uuids = match.get_uuid_by_metadata(metadata)
if len(uuids) == 0:
print("No UUID present for given metadata")
sys.exit()
else:
uuids = baseline.split(',')
uuids.append(uuid)

if metadata["benchmark.keyword"] == "k8s-netperf" :
index = "k8s-netperf"
@@ -67,22 +82,25 @@ def orion(config, debug, output,hunter_analyze):
ids = uuids
else:
index = "ripsaw-kube-burner"
runs = match.match_kube_burner(uuids)
ids = match.filter_runs(runs, runs)
if baseline == "":
runs = match.match_kube_burner(uuids)
ids = match.filter_runs(runs, runs)
else:
ids = uuids

metrics = test["metrics"]
dataframe_list = get_metric_data(ids, index, metrics, match, logger)
dataframe_list = orion_funcs.get_metric_data(ids, index, metrics, match, logger)

merged_df = reduce(
lambda left, right: pd.merge(left, right, on="uuid", how="inner"),
dataframe_list,
)
match.save_results(merged_df, csv_file_path=output.split(".")[0]+"-"+test['name']+".csv")

if hunter_analyze:
run_hunter_analyze(merged_df,test)

match.save_results(
merged_df, csv_file_path=kwargs["output"].split(".")[0]+"-"+test['name']+".csv"
)

if kwargs["hunter_analyze"]:
orion_funcs.run_hunter_analyze(merged_df,test)


if __name__ == "__main__":
29 changes: 28 additions & 1 deletion utils/orion_funcs.py
@@ -110,7 +110,34 @@ def get_metadata(test,logger):
Returns:
dict: dictionary of the metadata
"""
metadata=test['metadata']
metadata = {}
for k,v in test.items():
if k in ["metrics","name"]:
continue
metadata[k] = v
metadata["ocpVersion"] = str(metadata["ocpVersion"])
logger.debug('metadata' + str(metadata))
return metadata


def get_uuid_metadata(uuid,match,logger):
"""Gets metadata of the run from each test
Args:
uuid (str): str of uuid ot find metadata of
match: the fmatch instance
Returns:
dict: dictionary of the metadata
"""

test= match.get_metadata_by_uuid(uuid)
metadata = {}
for k,v in test.items():
if k in ["metrics","name"]:
continue
metadata[k] = v
metadata["ocpVersion"] = str(metadata["ocpVersion"])
logger.debug('metadata' + str(metadata))
return metadata
