From daeb5bd303947cb0911e3392ffb7865cd3ce4224 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 10 Feb 2025 16:10:41 -0500 Subject: [PATCH 01/24] #346 Do not make the combined yaml file anymore - instead read yaml info as strings - append strings to combine yaml information across yamls - load combined yaml info string - split up compile and pp classes to make script simpler and easier to follow --- fre/yamltools/combine_compile.py | 116 ++++++++ fre/yamltools/combine_pp.py | 249 ++++++++++++++++ fre/yamltools/combine_yamls.py | 488 +++---------------------------- 3 files changed, 399 insertions(+), 454 deletions(-) create mode 100644 fre/yamltools/combine_compile.py create mode 100644 fre/yamltools/combine_pp.py diff --git a/fre/yamltools/combine_compile.py b/fre/yamltools/combine_compile.py new file mode 100644 index 00000000..816fcad5 --- /dev/null +++ b/fre/yamltools/combine_compile.py @@ -0,0 +1,116 @@ +import os +import yaml + +def get_compile_paths(full_path,loaded_yml): + """ + """ + for key,value in loaded_yml.items(): + if key == "build": + py_path = os.path.join(full_path,value.get("platformYaml")) + cy_path = os.path.join(full_path,value.get("compileYaml")) + + return (py_path, cy_path) + +## COMPILE CLASS ## +class init_compile_yaml(): + """ class holding routines for initalizing compilation yamls """ + def __init__(self,yamlfile,platform,target,join_constructor): + """ + Process to combine yamls applicable to compilation + """ + self.yml = yamlfile + #self.name = yamlfile.split(".")[0] + self.namenopath = self.yml.split("/")[-1].split(".")[0] + self.platform = platform + self.target = target + + # Register tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # Define click options in string + yaml_content = (f'name: &name "{self.namenopath}"\n' + f'platform: &platform "{self.platform}"\n' + f'target: &target "{self.target}"\n') + + # Read model yaml as string + with open(self.yml,'r') as f: + model_content = f.read() + + # Combine information as strings + yaml_content += model_content + + # Load string as yaml + yml=yaml.load(yaml_content, Loader=yaml.Loader) + + # Return the combined string and loaded yaml + print(f" model yaml: {self.yml}") + return (yaml_content,yml) + + def combine_compile(self,yaml_content,loaded_yaml): + """ + Combine compile yaml with the defined combined.yaml + """ + self.mainyaml_dir = os.path.dirname(self.yml) + + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,loaded_yaml) + + # copy compile yaml info into combined yaml + if cy_path is not None: + with open(cy_path,'r') as cf: + compile_content = cf.read() + + # Combine information as strings + yaml_content += compile_content + + # Load string as yaml + yml=yaml.load(yaml_content, Loader=yaml.Loader) + + # Return the combined string and loaded yaml + print(f" compile yaml: {cy_path}") + return (yaml_content,yml) + + def combine_platforms(self, yaml_content, loaded_yaml): + """ + Combine platforms yaml with the defined combined.yaml + """ + self.mainyaml_dir = os.path.dirname(self.yml) + + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir, loaded_yaml) + + # copy compile yaml info into combined yaml + if py_path is not None: + with open(py_path,'r') as pf: + platform_content = pf.read() + + # Combine information as strings + yaml_content += 
platform_content + + # Load string as yaml + yml=yaml.load(yaml_content, Loader=yaml.Loader) + + # Return the combined string and loaded yaml + print(f" platforms yaml: {py_path}") + return (yaml_content, yml) + + def clean_yaml(self, yaml_content, loaded_yaml): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. + """ + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in loaded_yaml.keys(): + del loaded_yaml[kc] + + return loaded_yaml diff --git a/fre/yamltools/combine_pp.py b/fre/yamltools/combine_pp.py new file mode 100644 index 00000000..bcf7e58f --- /dev/null +++ b/fre/yamltools/combine_pp.py @@ -0,0 +1,249 @@ +import os +import yaml +from pathlib import Path + +def experiment_check(mainyaml_dir,experiment,loaded_yaml): + """ + Check that the experiment given is an experiment listed in the model yaml. + Extract experiment specific information and file paths. + Arguments: + mainyaml_dir : model yaml file + comb : combined yaml file name + experiment : experiment name + """ +# comb_model=yaml_load(comb) +# + # Check if exp name given is actually valid experiment listed in combined yaml + exp_list = [] + for i in loaded_yaml.get("experiments"): + exp_list.append(i.get("name")) + + if experiment not in exp_list: + raise NameError(f"{experiment} is not in the list of experiments") + + # Extract yaml path for exp. provided + # if experiment matches name in list of experiments in yaml, extract file path + for i in loaded_yaml.get("experiments"): + if experiment == i.get("name"): + expyaml=i.get("pp") + analysisyaml=i.get("analysis") + + if expyaml is not None: + ey_path=[] + for e in expyaml: + if Path(os.path.join(mainyaml_dir,e)).exists(): + ey=Path(os.path.join(mainyaml_dir,e)) + ey_path.append(ey) + else: + raise ValueError(f"Experiment yaml path given ({e}) does not exist.") + else: + raise ValueError("No experiment yaml path given!") + + if analysisyaml is not None: + ay_path=[] + for a in analysisyaml: + # prepend the directory containing the yaml + if Path(os.path.join(mainyaml_dir, a)).exists(): + ay=Path(os.path.join(mainyaml_dir,a)) + ay_path.append(ay) + else: + raise ValueError("Incorrect analysis yaml path given; does not exist.") + else: + ay_path=None + + return (ey_path,ay_path) + +## PP CLASS ## +class init_pp_yaml(): + """ class holding routines for initalizing post-processing yamls """ + def __init__(self,yamlfile,experiment,platform,target,join_constructor): + """ + Process to combine the applicable yamls for post-processing + """ + self.yml = yamlfile + self.name = experiment + self.platform = platform + self.target = target + + # Regsiter tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + +# # Name of the combined yaml +# self.combined=f"combined-{self.name}.yaml" +# +# print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + full = [] + full.append(f'name: &name "{self.name}"\n') + full.append(f'platform: &platform "{self.platform}"\n') + full.append(f'target: &target "{self.target}"\n') + with open(self.yml,'r') as f: + content = f.readlines() + + f1 = full + content + f2="".join(f1) +# print(f2) + + yml=yaml.load(f2,Loader=yaml.Loader) + return (f1,yml) + + # print(f" model yaml: {self.yml}") + + def combine_experiment(self,list1,yam): + """ + Combine experiment yamls with the defined 
combined.yaml. + If more than 1 pp yaml defined, return a list of paths. + """ + # Experiment Check + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,yam) + + print(ey_path) + ## COMBINE EXPERIMENT YAML INFO + # If only 1 pp yaml defined, combine with model yaml + if ey_path is not None and len(ey_path) == 1: + #expyaml_path = os.path.join(mainyaml_dir, i) + with open(ey_path,'r') as eyp: + content = eyp.readlines() + + new_list = list1 + content + f2="".join(new_list) + + # If more than 1 pp yaml listed + # (Must be done for aliases defined) + elif ey_path is not None and len(ey_path) > 1: + pp_yamls = [] + with open(ey_path[0],'r') as eyp0: + content = eyp0.readlines() + new_list1 = list1 + content + f2="".join(new_list1) + pp_yamls.append(new_list1) + + for i in ey_path[1:]: +# pp_exp = str(i).rsplit('/', maxsplit=1)[-1] + with open(i,'r') as eyp: + content = eyp.readlines() + + new_list_i = list1 + content + f3="".join(new_list_i) + pp_yamls.append(new_list_i) +# print(pp_yamls) +# quit() + return pp_yamls + + def combine_analysis(self,list2,yam): + """ + Combine analysis yamls with the defined combined.yaml + If more than 1 analysis yaml defined, return a list of paths. + """ + # Experiment Check + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,yam) + + ## COMBINE EXPERIMENT YAML INFO + # If only 1 pp yaml defined, combine with model yaml + if ay_path is not None and len(ay_path) == 1: + #expyaml_path = os.path.join(mainyaml_dir, i) + with open(ay_path,'r') as ayp: + content = ayp.readlines() + + new_list = list2 + content + f2="".join(new_list) +# #print(f2) +# + # If more than 1 pp yaml listed + # (Must be done for aliases defined) + elif ay_path is not None and len(ay_path) > 1: + analysis_yamls = [] + with open(ay_path[0],'r') as ayp0: + content = ayp0.readlines() + new_list2 = list2 + content + #f2="".join(new_list2) + analysis_yamls.append(new_list2) + + for i in ay_path[1:]: + with open(i,'r') as ayp: + content = ayp.readlines() + + new_list_i = list2 + content + f3="".join(new_list_i) + analysis_yamls.append(new_list_i) + return analysis_yamls + + def merge_multiple_yamls(self, pp_list, analysis_list): + """ + Merge separately combined post-processing and analysis + yamls into fully combined yaml (without overwriting). + """ + result = {} + + # If more than one post-processing yaml is listed, update + # dictionary with content from 1st yaml in list + # Looping through rest of yamls listed, compare key value pairs. + # If instance of key is a dictionary in both result and loaded + # yamlfile, update the key in result to + # include the loaded yaml file's value. + if pp_list is not None and len(pp_list) > 1: + newnewnew = "".join(pp_list[0]) + result.update(yaml.load(newnewnew,Loader=yaml.Loader)) + for i in pp_list[1:]: + morenew = "".join(i) + yf = yaml.load(morenew,Loader=yaml.Loader) + for key in result: + if key in yf: + if isinstance(result[key],dict) and isinstance(yf[key],dict): + if key == "postprocess": + result[key]["components"] = yf[key]["components"] + result[key]["components"] + # If only one post-processing yaml listed, do nothing + # (already combined in 'combine_experiments' function) + elif pp_list is not None and len(pp_list) == 1: + pass + + # If more than one analysis yaml is listed, update dictionary with content from 1st yaml + # Looping through rest of yamls listed, compare key value pairs. 
+ # If instance of key is a dictionary in both result and loaded yamlfile, update the key + # in result to include the loaded yaml file's value. + if analysis_list is not None and len(analysis_list) > 1: + new4 = "".join(analysis_list[0]) + result.update(yaml.load(new4,Loader=yaml.Loader)) + for i in analysis_list[1:]: + more_new4 = "".join(i) + yf = yaml.load(more_new4,Loader=yaml.Loader) + for key in result: + if key in yf: + if isinstance(result[key],dict) and isinstance(yf[key],dict): + if key == "analysis": + result[key] = yf[key] | result[key] + # If only one analysis yaml listed, do nothing + # (already combined in 'combine_analysis' function) + elif analysis_list is not None and len(analysis_list) == 1: + pass + + print(result) + +# def clean_yaml(self): +# """ +# Clean the yaml; remove unnecessary sections in +# final combined yaml. +# """ +# # Load the fully combined yaml +# full_yaml = yaml_load(self.combined) +# +# # Clean the yaml +# # If keys exists, delete: +# keys_clean=["fre_properties", "shared", "experiments"] +# for kc in keys_clean: +# if kc in full_yaml.keys(): +# del full_yaml[kc] +# +# # Dump cleaned dictionary back into combined yaml file +# with open(self.combined,'w') as f: +# yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) +# +# print(f"Combined yaml located here: {os.path.abspath(self.combined)}") +# return self.combined diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index 3177b3cd..ab059540 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -1,17 +1,11 @@ -""" -Script combines the model yaml with the compile, platform, and experiment yamls. -""" - -## TO-DO: -# - figure out way to safe_load (yaml_loader=yaml.SafeLoader?) -# - condition where there are multiple pp and analysis yamls - import os import shutil from pathlib import Path import click import yaml +import fre.yamltools.combine_compile as cc +import fre.yamltools.combine_pp as cp def join_constructor(loader, node): """ @@ -21,406 +15,6 @@ def join_constructor(loader, node): seq = loader.construct_sequence(node) return ''.join([str(i) for i in seq]) -def yaml_load(yamlfile): - """ - Load the yamlfile - """ - with open(yamlfile, 'r') as yf: - y = yaml.load(yf,Loader=yaml.Loader) - - return y - -def get_compile_paths(mainyaml_dir,comb): - """ - Extract compile and platform paths from model yaml - """ - comb_model=yaml_load(comb) - - # set platform yaml filepath - if comb_model["build"]["platformYaml"] is not None: - if Path(os.path.join(mainyaml_dir,comb_model["build"]["platformYaml"])).exists(): - py=comb_model["build"]["platformYaml"] - py_path=Path(os.path.join(mainyaml_dir,py)) - else: - raise ValueError("Incorrect platform yaml path given; does not exist.") - else: - py_path=None - raise ValueError("No platform yaml path given!") - - # set compile yaml filepath - if comb_model["build"]["compileYaml"] is not None: - if Path(os.path.join(mainyaml_dir,comb_model["build"]["compileYaml"])).exists(): - cy=comb_model["build"]["compileYaml"] - cy_path=Path(os.path.join(mainyaml_dir,cy)) - else: - raise ValueError("Incorrect compile yaml path given; does not exist.") - else: - cy_path=None - raise ValueError("No compile yaml path given!") - - return (py_path,cy_path) - -def experiment_check(mainyaml_dir,comb,experiment): - """ - Check that the experiment given is an experiment listed in the model yaml. - Extract experiment specific information and file paths. 
- Arguments: - mainyaml_dir : model yaml file - comb : combined yaml file name - experiment : experiment name - """ - comb_model=yaml_load(comb) - - # Check if exp name given is actually valid experiment listed in combined yaml - exp_list = [] - for i in comb_model.get("experiments"): - exp_list.append(i.get("name")) - - if experiment not in exp_list: - raise NameError(f"{experiment} is not in the list of experiments") - - # Extract compile yaml path for exp. provided - # if experiment matches name in list of experiments in yaml, extract file path - for i in comb_model.get("experiments"): - if experiment == i.get("name"): - expyaml=i.get("pp") - analysisyaml=i.get("analysis") - - if expyaml is not None: - ey_path=[] - for e in expyaml: - if Path(os.path.join(mainyaml_dir,e)).exists(): - ey=Path(os.path.join(mainyaml_dir,e)) - ey_path.append(ey) - else: - raise ValueError(f"Experiment yaml path given ({e}) does not exist.") - else: - raise ValueError("No experiment yaml path given!") - - if analysisyaml is not None: - ay_path=[] - for a in analysisyaml: - # prepend the directory containing the yaml - if Path(os.path.join(mainyaml_dir, a)).exists(): - ay=Path(os.path.join(mainyaml_dir,a)) - ay_path.append(ay) - else: - raise ValueError("Incorrect analysis yaml path given; does not exist.") - else: - ay_path=None - - return (ey_path,ay_path) - -########################################################################################### -## COMPILE CLASS ## -class init_compile_yaml(): - """ class holding routines for initalizing compilation yamls """ - def __init__(self,yamlfile,platform,target): - """ - Process to combine yamls applicable to compilation - """ - self.yml = yamlfile - self.name = yamlfile.split(".")[0] - self.namenopath = self.name.split("/")[-1].split(".")[0] - self.platform = platform - self.target = target - - # Register tag handler - yaml.add_constructor('!join', join_constructor) - - # Path to the main model yaml - self.mainyaml_dir = os.path.dirname(self.yml) - - # Name of the combined yaml - base_name=f"combined-{self.namenopath}.yaml" - if len(self.mainyaml_dir) == 0: - self.combined = base_name - else: - self.combined = f"{self.mainyaml_dir}/{base_name}" - - print("Combining yaml files: ") - - def combine_model(self): - """ - Create the combined.yaml and merge it with the model yaml - """ - # copy model yaml info into combined yaml - with open(self.combined,'w+',encoding='UTF-8') as f1: - f1.write(f'name: &name "{self.name}"\n') - f1.write(f'platform: &platform "{self.platform}"\n') - f1.write(f'target: &target "{self.target}"\n\n') - try: - with open(self.yml,'r',encoding='UTF-8') as f2: - f1.write("### MODEL YAML SETTINGS ###\n") - shutil.copyfileobj(f2,f1) - except Exception as exc: - raise FileNotFoundError(f'{self.yml} not found') from exc - print(f" model yaml: {self.yml}") - - def combine_compile(self): - """ - Combine compile yaml with the defined combined.yaml - """ - # Get compile info - (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) - - # copy compile yaml info into combined yaml - if cy_path is not None: - with open(self.combined,'a',encoding='UTF-8') as f1: - with open(cy_path,'r',encoding='UTF-8') as f2: - f1.write("\n### COMPILE INFO ###\n") - shutil.copyfileobj(f2,f1) - print(f" compile yaml: {cy_path}") - - def combine_platforms(self): - """ - Combine platforms yaml with the defined combined.yaml - """ - # Get compile info - (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) - - # combine platform yaml - if 
py_path is not None: - with open(self.combined,'a',encoding='UTF-8') as f1: - with open(py_path,'r',encoding='UTF-8') as f2: - f1.write("\n### PLATFORM INFO ###\n") - shutil.copyfileobj(f2,f1) - print(f" platforms yaml: {py_path}") - - def clean_yaml(self): - """ - Clean the yaml; remove unnecessary sections in - final combined yaml. - """ - # Load the fully combined yaml - full_yaml = yaml_load(self.combined) - - # Clean the yaml - # If keys exists, delete: - keys_clean=["fre_properties", "shared", "experiments"] - for kc in keys_clean: - if kc in full_yaml.keys(): - del full_yaml[kc] - - with open(self.combined,'w',encoding='UTF-8') as f: - yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) - - print(f"Combined yaml located here: {os.path.abspath(self.combined)}") - return self.combined - -########################################################################################### -## PP CLASS ## -class init_pp_yaml(): - """ class holding routines for initalizing post-processing yamls """ - def __init__(self,yamlfile,experiment,platform,target): - """ - Process to combine the applicable yamls for post-processing - """ - self.yml = yamlfile - self.name = experiment - self.platform = platform - self.target = target - - # Regsiter tag handler - yaml.add_constructor('!join', join_constructor) - - # Path to the main model yaml - self.mainyaml_dir = os.path.dirname(self.yml) - - # Name of the combined yaml - self.combined=f"combined-{self.name}.yaml" - - print("Combining yaml files: ") - - def combine_model(self): - """ - Create the combined.yaml and merge it with the model yaml - """ - # copy model yaml info into combined yaml - with open(self.combined,'w+',encoding='UTF-8') as f1: - f1.write(f'name: &name "{self.name}"\n') - f1.write(f'platform: &platform "{self.platform}"\n') - f1.write(f'target: &target "{self.target}"\n\n') - try: - with open(self.yml,'r',encoding='UTF-8') as f2: - f1.write("### MODEL YAML SETTINGS ###\n") - shutil.copyfileobj(f2,f1) - except Exception as exc: - raise FileNotFoundError(f'{self.yml} not found') from exc - print(f" model yaml: {self.yml}") - - def combine_experiment(self): - """ - Combine experiment yamls with the defined combined.yaml. - If more than 1 pp yaml defined, return a list of paths. 
- """ - # Experiment Check - (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) - - ## COMBINE EXPERIMENT YAML INFO - # If only 1 pp yaml defined, combine with model yaml - if ey_path is not None and len(ey_path) == 1: - #expyaml_path = os.path.join(mainyaml_dir, i) - with open(self.combined,'a',encoding='UTF-8') as f1: - with open(ey_path[0],'r',encoding='UTF-8') as f2: - #copy expyaml into combined - shutil.copyfileobj(f2,f1) - print(f" experiment yaml: {ey_path[0]}") - - # If more than 1 pp yaml listed, create an intermediate yaml folder to combine - # each model and pp yaml into own combined yaml file - # (Must be done for aliases defined) - elif ey_path is not None and len(ey_path) > 1: - pp_yamls = [] - for i in ey_path: - pp_exp = str(i).rsplit('/', maxsplit=1)[-1] - - #create yamlfiles in folder - cwd=os.getcwd() - tmp_yaml_folder = os.path.join(cwd,"model_x_exp_yamls") - os.makedirs(tmp_yaml_folder, exist_ok=True) - shutil.copy(self.combined, os.path.join(tmp_yaml_folder,f"combined-{pp_exp}")) - with open(os.path.join(tmp_yaml_folder,f"combined-{pp_exp}"),'a', - encoding='UTF-8') as f1: - with open(i,'r',encoding='UTF-8') as f2: - #copy expyaml into combined - shutil.copyfileobj(f2,f1) - pp_yamls.append(os.path.join(tmp_yaml_folder,f"combined-{pp_exp}")) - - return pp_yamls - - def combine_analysis(self): - """ - Combine analysis yamls with the defined combined.yaml - If more than 1 analysis yaml defined, return a list of paths. - """ - # Experiment Check - (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) - - ## COMBINE ANALYSIS YAML INFO - # If only 1 analysis yaml listed, combine with model yaml - if ay_path is not None and len(ay_path) == 1: - with open(self.combined,'a',encoding='UTF-8') as f1: - with open(ay_path[0],'r',encoding='UTF-8') as f2: - #copy expyaml into combined - shutil.copyfileobj(f2,f1) - - # If more than 1 analysis yaml listed, create an intermediate yaml folder to combine - # each model and analysis yaml into own combined yaml file - elif ay_path is not None and len(ay_path) > 1: - analysis_yamls=[] - for i in ay_path: - analysis = str(i).rsplit('/', maxsplit=1)[-1] - - #create yamlfiles in folder - cwd=os.getcwd() - tmp_yaml_folder = os.path.join(cwd,"model_x_analysis_yamls") - os.makedirs(tmp_yaml_folder, exist_ok=True) - - shutil.copy(self.combined, os.path.join(tmp_yaml_folder,f"combined-{analysis}")) - with open(os.path.join(tmp_yaml_folder,f"combined-{analysis}"),'a', - encoding='UTF-8') as f1: - with open(i,'r',encoding='UTF-8') as f2: - #copy expyaml into combined - shutil.copyfileobj(f2,f1) - - analysis_yamls.append(os.path.join(tmp_yaml_folder,f"combined-{analysis}")) - - return analysis_yamls - - def merge_multiple_yamls(self, pp_list, analysis_list): - """ - Merge separately combined post-processing and analysis - yamls into fully combined yaml (without overwriting). - """ - result = {} - - # If more than one post-processing yaml is listed, update - # dictionary with content from 1st yaml in list - # Looping through rest of yamls listed, compare key value pairs. - # If instance of key is a dictionary in both result and loaded - # yamlfile, update the key in result to - # include the loaded yaml file's value. 
- if pp_list is not None and len(pp_list) > 1: - result.update(yaml_load(pp_list[0])) - for i in pp_list[1:]: - yf = yaml_load(i) - for key in result: - if key in yf: - if isinstance(result[key],dict) and isinstance(yf[key],dict): - if key == "postprocess": - result[key]["components"] = yf[key]["components"] + result[key]["components"] - # If only one post-processing yaml listed, do nothing - # (already combined in 'combine_experiments' function) - elif pp_list is not None and len(pp_list) == 1: - pass - - # If more than one analysis yaml is listed, update dictionary with content from 1st yaml - # Looping through rest of yamls listed, compare key value pairs. - # If instance of key is a dictionary in both result and loaded yamlfile, update the key - # in result to include the loaded yaml file's value. - if analysis_list is not None and len(analysis_list) > 1: - result.update(yaml_load(analysis_list[0])) - for i in analysis_list[1:]: - yf = yaml_load(i) - for key in result: - if key in yf: - if isinstance(result[key],dict) and isinstance(yf[key],dict): - if key == "analysis": - result[key] = yf[key] | result[key] - # If only one analysis yaml listed, do nothing - # (already combined in 'combine_analysis' function) - elif analysis_list is not None and len(analysis_list) == 1: - pass - - # Dump the updated result dictionary back into the final combined yaml file - with open(self.combined,'w',encoding='UTF-8') as f: - yaml.safe_dump(result,f,default_flow_style=False,sort_keys=False) - if pp_list is not None: - for i in pp_list: - exp = str(i).rsplit('/', maxsplit=1)[-1] - print(f" experiment yaml: {exp}") - if analysis_list is not None: - for i in analysis_list: - analysis = str(i).rsplit('/', maxsplit=1)[-1] - print(f" analysis yaml: {analysis}") - - def remove_tmp_yamlfiles(self, exp_yamls, analysis_yamls): - """ - Clean up separately created model/pp experiment and - model/analysis yamls. They are used for final combined - yaml but not needed separately. - """ - # Remove intermediate model_x_exp_yamls folder if it is not empty - if exp_yamls is not None and Path(exp_yamls[0]).exists(): - shutil.rmtree(os.path.dirname(exp_yamls[0])) - # Remove intermediate model_x_analysis_yamls if not empty - if analysis_yamls is not None and Path(analysis_yamls[0]).exists(): - shutil.rmtree(os.path.dirname(analysis_yamls[0])) - - def clean_yaml(self): - """ - Clean the yaml; remove unnecessary sections in - final combined yaml. 
- """ - # Load the fully combined yaml - full_yaml = yaml_load(self.combined) - - # Clean the yaml - # If keys exists, delete: - keys_clean=["fre_properties", "shared", "experiments"] - for kc in keys_clean: - if kc in full_yaml.keys(): - del full_yaml[kc] - - # Dump cleaned dictionary back into combined yaml file - with open(self.combined,'w') as f: - yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) - - print(f"Combined yaml located here: {os.path.abspath(self.combined)}") - return self.combined - -########################################################################################### ## Functions to combine the yaml files ## def get_combined_compileyaml(comb): """ @@ -428,37 +22,30 @@ def get_combined_compileyaml(comb): Arguments: comb : combined yaml object """ - # Merge model into combined file - comb.combine_model() - # Merge compile.yaml into combined file - comb.combine_compile() - # Merge platforms.yaml into combined file - full_combined = comb.combine_platforms() - # Clean the yaml - full_combined = comb.clean_yaml() + print("Combining yaml files into one dictionary: ") + try: + (yaml_content, loaded_yaml)=comb.combine_model() + except: + raise ValueError("uh oh") - return full_combined + # Merge compile into combined file to create updated yaml_content/yaml + try: + (yaml_content, loaded_yaml) = comb.combine_compile(yaml_content, loaded_yaml) + except: + raise ValueError("uh oh again") -def combined_compile_existcheck(combined,yml,platform,target): - """ - Checks for if combined compile yaml exists already. - If not, combine model, compile, and platform yamls. - """ - cd = Path.cwd() - combined_path=os.path.join(cd,combined) + # Merge platforms.yaml into combined file + try: + (yaml_content,loaded_yaml) = comb.combine_platforms(yaml_content, loaded_yaml) + except: + raise ValueError("uh oh one more time") - # Combine model, compile, and platform yamls - # If fre yammltools combine-yamls tools was used, the combined yaml should exist - if Path(combined_path).exists(): - full_combined = combined_path - print("\nNOTE: Yamls previously merged.") - else: - comb = init_compile_yaml(yml,platform,target) - full_combined = get_combined_compileyaml(comb) + print(yaml_content) - return full_combined + # Clean the yaml + cleaned_yaml = comb.clean_yaml(yaml_content, loaded_yaml) + return cleaned_yaml -########################################################################################### def get_combined_ppyaml(comb): """ Combine the model, experiment, and analysis yamls @@ -466,43 +53,36 @@ def get_combined_ppyaml(comb): comb : combined yaml object """ # Merge model into combined file - comb.combine_model() + (new_dict,new_comb) = comb.combine_model() # Merge pp experiment yamls into combined file - comb_pp = comb.combine_experiment() + comb_pp_updated_list = comb.combine_experiment(new_dict,new_comb) # Merge analysis yamls, if defined, into combined file - comb_analysis = comb.combine_analysis() + comb_analysis_updated_list = comb.combine_analysis(new_dict,new_comb) # Merge model/pp and model/analysis yamls if more than 1 is defined # (without overwriting the yaml) - comb.merge_multiple_yamls(comb_pp, comb_analysis) - # Remove separate combined pp yaml files - comb.remove_tmp_yamlfiles(comb_pp, comb_analysis) - # Clean the yaml - full_combined = comb.clean_yaml() - - return full_combined + comb.merge_multiple_yamls(comb_pp_updated_list, comb_analysis_updated_list) +# # Remove separate combined pp yaml files +# comb.remove_tmp_yamlfiles(comb_pp, comb_analysis) +# 
# Clean the yaml +# full_combined = comb.clean_yaml() +# +# return full_combined -########################################################################################### def consolidate_yamls(yamlfile,experiment,platform,target,use): """ Depending on `use` argument passed, either create the final combined yaml for compilation or post-processing """ - # Regsiter tag handler - yaml.add_constructor('!join', join_constructor) - - # Path to the main model yaml - mainyaml_dir = os.path.dirname(yamlfile) - if use == "compile": - combined = init_compile_yaml(yamlfile, platform, target) + combined = cc.init_compile_yaml(yamlfile, platform, target, join_constructor) # Create combined compile yaml get_combined_compileyaml(combined) elif use =="pp": - combined = init_pp_yaml(yamlfile,experiment,platform,target) + combined = cp.init_pp_yaml(yamlfile, experiment, platform, target, join_constructor) # Create combined pp yaml get_combined_ppyaml(combined) else: - raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") + raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") @click.command() def _consolidate_yamls(yamlfile,experiment,platform,target,use): From 9d005b62277222790da65c9a8e1bb044b7f299af Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 11 Feb 2025 16:34:01 -0500 Subject: [PATCH 02/24] #346 Add in cleaning the yaml info --- fre/yamltools/combine_compile.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/fre/yamltools/combine_compile.py b/fre/yamltools/combine_compile.py index 816fcad5..3fc2f88e 100644 --- a/fre/yamltools/combine_compile.py +++ b/fre/yamltools/combine_compile.py @@ -51,7 +51,7 @@ def combine_model(self): # Return the combined string and loaded yaml print(f" model yaml: {self.yml}") - return (yaml_content,yml) + return (yaml_content, yml) def combine_compile(self,yaml_content,loaded_yaml): """ @@ -101,16 +101,21 @@ def combine_platforms(self, yaml_content, loaded_yaml): print(f" platforms yaml: {py_path}") return (yaml_content, yml) - def clean_yaml(self, yaml_content, loaded_yaml): + def clean_yaml(self, yaml_content): """ Clean the yaml; remove unnecessary sections in final combined yaml. 
""" + # Load the yaml + yml=yaml.load(yaml_content, Loader=yaml.Loader) + # Clean the yaml # If keys exists, delete: keys_clean=["fre_properties", "shared", "experiments"] for kc in keys_clean: - if kc in loaded_yaml.keys(): - del loaded_yaml[kc] + if kc in yml.keys(): + del yml[kc] + + cleaned_yml = yaml.safe_dump(yml,default_flow_style=False,sort_keys=False) - return loaded_yaml + return cleaned_yml From 5f59da1e6c1450b52ea8e16fb4362d2c671730a8 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 11 Feb 2025 16:34:52 -0500 Subject: [PATCH 03/24] #346 Fix functions for merging yaml info for post-processing --- fre/yamltools/combine_pp.py | 166 ++++++++++++++++++------------------ 1 file changed, 85 insertions(+), 81 deletions(-) diff --git a/fre/yamltools/combine_pp.py b/fre/yamltools/combine_pp.py index bcf7e58f..4f3539dd 100644 --- a/fre/yamltools/combine_pp.py +++ b/fre/yamltools/combine_pp.py @@ -1,6 +1,7 @@ import os import yaml from pathlib import Path +import pprint def experiment_check(mainyaml_dir,experiment,loaded_yaml): """ @@ -71,114 +72,109 @@ def __init__(self,yamlfile,experiment,platform,target,join_constructor): # Path to the main model yaml self.mainyaml_dir = os.path.dirname(self.yml) -# # Name of the combined yaml -# self.combined=f"combined-{self.name}.yaml" -# -# print("Combining yaml files: ") - def combine_model(self): """ Create the combined.yaml and merge it with the model yaml """ - full = [] - full.append(f'name: &name "{self.name}"\n') - full.append(f'platform: &platform "{self.platform}"\n') - full.append(f'target: &target "{self.target}"\n') + # Define click options in string + yaml_content = (f'name: &name "{self.name}"\n' + f'platform: &platform "{self.platform}"\n' + f'target: &target "{self.target}"\n') + + # Read model yaml as string with open(self.yml,'r') as f: - content = f.readlines() + model_content = f.read() - f1 = full + content - f2="".join(f1) -# print(f2) + # Combine information as strings + yaml_content += model_content - yml=yaml.load(f2,Loader=yaml.Loader) - return (f1,yml) + # Load string as yaml + yml=yaml.load(yaml_content,Loader=yaml.Loader) - # print(f" model yaml: {self.yml}") + # Return the combined string and loaded yaml + print(f" model yaml: {self.yml}") + return (yaml_content, yml) - def combine_experiment(self,list1,yam): + def combine_experiment(self, yaml_content, loaded_yaml): """ Combine experiment yamls with the defined combined.yaml. If more than 1 pp yaml defined, return a list of paths. 
""" # Experiment Check - (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,yam) + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,loaded_yaml) - print(ey_path) + pp_yamls = [] ## COMBINE EXPERIMENT YAML INFO # If only 1 pp yaml defined, combine with model yaml if ey_path is not None and len(ey_path) == 1: #expyaml_path = os.path.join(mainyaml_dir, i) with open(ey_path,'r') as eyp: - content = eyp.readlines() + exp_content = eyp.read() - new_list = list1 + content - f2="".join(new_list) + exp_info = yaml_content + exp_content + pp_yamls.append(exp_info) + print(f" experiment yaml: {ey_path}") # If more than 1 pp yaml listed # (Must be done for aliases defined) elif ey_path is not None and len(ey_path) > 1: - pp_yamls = [] with open(ey_path[0],'r') as eyp0: - content = eyp0.readlines() - new_list1 = list1 + content - f2="".join(new_list1) - pp_yamls.append(new_list1) + exp_content = eyp0.read() #string + + exp_info = yaml_content + exp_content + pp_yamls.append([exp_info]) for i in ey_path[1:]: -# pp_exp = str(i).rsplit('/', maxsplit=1)[-1] with open(i,'r') as eyp: - content = eyp.readlines() + exp_content = eyp.read() + + exp_info_i = yaml_content + exp_content + pp_yamls.append([exp_info_i]) - new_list_i = list1 + content - f3="".join(new_list_i) - pp_yamls.append(new_list_i) -# print(pp_yamls) -# quit() return pp_yamls - def combine_analysis(self,list2,yam): + def combine_analysis(self,yaml_content,loaded_yaml): """ Combine analysis yamls with the defined combined.yaml If more than 1 analysis yaml defined, return a list of paths. """ # Experiment Check - (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,yam) + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,loaded_yaml) + analysis_yamls = [] ## COMBINE EXPERIMENT YAML INFO # If only 1 pp yaml defined, combine with model yaml if ay_path is not None and len(ay_path) == 1: #expyaml_path = os.path.join(mainyaml_dir, i) with open(ay_path,'r') as ayp: - content = ayp.readlines() + analysis_content = ayp.read() + + analysis_info = yaml_content + analysis_content + analysis_yamls.append(analysis_info) + print(f" analysis yaml: {ay_path}") - new_list = list2 + content - f2="".join(new_list) -# #print(f2) -# # If more than 1 pp yaml listed # (Must be done for aliases defined) elif ay_path is not None and len(ay_path) > 1: - analysis_yamls = [] with open(ay_path[0],'r') as ayp0: - content = ayp0.readlines() - new_list2 = list2 + content - #f2="".join(new_list2) - analysis_yamls.append(new_list2) + analysis_content = ayp0.read() + + analysis_info = yaml_content + analysis_content + analysis_yamls.append([analysis_info]) for i in ay_path[1:]: with open(i,'r') as ayp: - content = ayp.readlines() + analysis_content = ayp.read() + + analysis_info_i = yaml_content + analysis_content + analysis_yamls.append([analysis_info_i]) - new_list_i = list2 + content - f3="".join(new_list_i) - analysis_yamls.append(new_list_i) return analysis_yamls def merge_multiple_yamls(self, pp_list, analysis_list): """ Merge separately combined post-processing and analysis - yamls into fully combined yaml (without overwriting). + yamls into fully combined yaml (without overwriting like sections). """ result = {} @@ -189,11 +185,14 @@ def merge_multiple_yamls(self, pp_list, analysis_list): # yamlfile, update the key in result to # include the loaded yaml file's value. 
if pp_list is not None and len(pp_list) > 1: - newnewnew = "".join(pp_list[0]) - result.update(yaml.load(newnewnew,Loader=yaml.Loader)) + yml_pp = "".join(pp_list[0]) + result.update(yaml.load(yml_pp,Loader=yaml.Loader)) + #print(f" experiment yaml: {exp}") +# print(pp_list[0]) + for i in pp_list[1:]: - morenew = "".join(i) - yf = yaml.load(morenew,Loader=yaml.Loader) + uhm = "".join(i) + yf = yaml.load(uhm,Loader=yaml.Loader) for key in result: if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): @@ -209,11 +208,13 @@ def merge_multiple_yamls(self, pp_list, analysis_list): # If instance of key is a dictionary in both result and loaded yamlfile, update the key # in result to include the loaded yaml file's value. if analysis_list is not None and len(analysis_list) > 1: - new4 = "".join(analysis_list[0]) - result.update(yaml.load(new4,Loader=yaml.Loader)) + yml_analysis = "".join(analysis_list[0]) + result.update(yaml.load(yml_analysis,Loader=yaml.Loader)) + for i in analysis_list[1:]: - more_new4 = "".join(i) - yf = yaml.load(more_new4,Loader=yaml.Loader) + #more_new4 = "".join(i) + uhm_again = "".join(i) + yf = yaml.load(uhm_again,Loader=yaml.Loader) for key in result: if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): @@ -224,26 +225,29 @@ def merge_multiple_yamls(self, pp_list, analysis_list): elif analysis_list is not None and len(analysis_list) == 1: pass - print(result) +# if pp_list is not None: +# for i in pp_list: +# exp = str(i).rsplit('/', maxsplit=1)[-1] +# print(f" experiment yaml: {exp}") +# if analysis_list is not None: +# for i in analysis_list: +# analysis = str(i).rsplit('/', maxsplit=1)[-1] +# print(f" analysis yaml: {analysis}") -# def clean_yaml(self): -# """ -# Clean the yaml; remove unnecessary sections in -# final combined yaml. -# """ -# # Load the fully combined yaml -# full_yaml = yaml_load(self.combined) -# -# # Clean the yaml -# # If keys exists, delete: -# keys_clean=["fre_properties", "shared", "experiments"] -# for kc in keys_clean: -# if kc in full_yaml.keys(): -# del full_yaml[kc] -# -# # Dump cleaned dictionary back into combined yaml file -# with open(self.combined,'w') as f: -# yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) -# -# print(f"Combined yaml located here: {os.path.abspath(self.combined)}") -# return self.combined + return result + + def clean_yaml(self,yml_dict): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. 
+ """ + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in yml_dict.keys(): + del yml_dict[kc] + + # Dump cleaned dictionary back into combined yaml file + cleaned_yaml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) + return cleaned_yaml From 213dde2c70860cad9272a598431e4a96b5598907 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 11 Feb 2025 16:35:43 -0500 Subject: [PATCH 04/24] #346 Add in an `output` option - `output` option would dump final, cleaned yaml info to a file --- fre/yamltools/freyamltools.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/fre/yamltools/freyamltools.py b/fre/yamltools/freyamltools.py index 97a06721..2575f9c4 100644 --- a/fre/yamltools/freyamltools.py +++ b/fre/yamltools/freyamltools.py @@ -1,7 +1,7 @@ ''' fre yamltools ''' import click -from .combine_yamls import _consolidate_yamls +from fre.yamltools import combine_yamls_script @click.group(help=click.style(" - access fre yamltools subcommands", fg=(202,177,95))) def yamltools_cli(): @@ -23,22 +23,24 @@ def yamltools_cli(): help="Platform name", required=True) @click.option("-t", - "--target", - type=str, - help="Target name", - required=True) + "--target", + type=str, + help="Target name", + required=True) @click.option("--use", type=click.Choice(['compile','pp']), help="Process user is combining yamls for. Can pass 'compile' or 'pp'", required=True) -@click.pass_context -def combine_yamls(context,yamlfile,experiment,platform,target,use): - # pylint: disable=unused-argument +@click.option("-o", + "--output", + type=str, + help="Output") +def combine_yamls(yamlfile,experiment,platform,target,use,output): """ - Combine the model yaml with the compile, platform, experiment, and analysis yamls """ - context.forward(_consolidate_yamls) + combine_yamls_script.consolidate_yamls(yamlfile,experiment,platform,target,use,output) if __name__ == "__main__": yamltools_cli() From c13cf6e7ba8111d72927fae732a169be1ec212da Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 11 Feb 2025 16:36:49 -0500 Subject: [PATCH 05/24] #346 Update combine_yamls script --- ...mbine_yamls.py => combine_yamls_script.py} | 80 ++++++++++++------- 1 file changed, 49 insertions(+), 31 deletions(-) rename fre/yamltools/{combine_yamls.py => combine_yamls_script.py} (53%) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls_script.py similarity index 53% rename from fre/yamltools/combine_yamls.py rename to fre/yamltools/combine_yamls_script.py index ab059540..5e097243 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls_script.py @@ -6,6 +6,7 @@ import yaml import fre.yamltools.combine_compile as cc import fre.yamltools.combine_pp as cp +import pprint def join_constructor(loader, node): """ @@ -22,7 +23,6 @@ def get_combined_compileyaml(comb): Arguments: comb : combined yaml object """ - print("Combining yaml files into one dictionary: ") try: (yaml_content, loaded_yaml)=comb.combine_model() except: @@ -40,10 +40,8 @@ def get_combined_compileyaml(comb): except: raise ValueError("uh oh one more time") - print(yaml_content) - # Clean the yaml - cleaned_yaml = comb.clean_yaml(yaml_content, loaded_yaml) + cleaned_yaml = comb.clean_yaml(yaml_content) return cleaned_yaml def get_combined_ppyaml(comb): @@ -52,23 +50,36 @@ def get_combined_ppyaml(comb): Arguments: comb : combined yaml object """ - # Merge model into combined file - (new_dict,new_comb) = 
comb.combine_model() - # Merge pp experiment yamls into combined file - comb_pp_updated_list = comb.combine_experiment(new_dict,new_comb) - # Merge analysis yamls, if defined, into combined file - comb_analysis_updated_list = comb.combine_analysis(new_dict,new_comb) - # Merge model/pp and model/analysis yamls if more than 1 is defined - # (without overwriting the yaml) - comb.merge_multiple_yamls(comb_pp_updated_list, comb_analysis_updated_list) -# # Remove separate combined pp yaml files -# comb.remove_tmp_yamlfiles(comb_pp, comb_analysis) -# # Clean the yaml -# full_combined = comb.clean_yaml() -# -# return full_combined - -def consolidate_yamls(yamlfile,experiment,platform,target,use): + try: + # Merge model into combined file + (yaml_content, loaded_yaml) = comb.combine_model() + except: + print("pp uh oh 1") + + try: + # Merge pp experiment yamls into combined file + comb_pp_updated_list = comb.combine_experiment(yaml_content, loaded_yaml) + except: + raise ValueError("pp uh oh 2") + + try: + # Merge analysis yamls, if defined, into combined file + comb_analysis_updated_list = comb.combine_analysis(yaml_content, loaded_yaml) + except: + raise ValueError("uh oh 3") + + try: + # Merge model/pp and model/analysis yamls if more than 1 is defined + # (without overwriting the yaml) + full_combined = comb.merge_multiple_yamls(comb_pp_updated_list, comb_analysis_updated_list) + except: + raise ValueError("uh oh 4") + + # Clean the yaml + cleaned_yaml = comb.clean_yaml(full_combined) + return cleaned_yaml + +def consolidate_yamls(yamlfile,experiment,platform,target,use,output): """ Depending on `use` argument passed, either create the final combined yaml for compilation or post-processing @@ -76,22 +87,29 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use): if use == "compile": combined = cc.init_compile_yaml(yamlfile, platform, target, join_constructor) # Create combined compile yaml - get_combined_compileyaml(combined) + print("Combining yaml files into one dictionary: ") + + if output is None: + get_combined_compileyaml(combined) + else: + with open(output,'w') as out: + out.write(get_combined_compileyaml(combined)) + print(f"COMBINE OUT HERE: {os.path.abspath(output)}") + elif use =="pp": combined = cp.init_pp_yaml(yamlfile, experiment, platform, target, join_constructor) # Create combined pp yaml - get_combined_ppyaml(combined) + print("Combining yaml files into one dictionary: ") + + if output is None: + get_combined_ppyaml(combined) + else: + with open(output,'w') as out: + out.write(get_combined_ppyaml(combined)) + print(f"COMBINE OUT HERE: {os.path.abspath(output)}") else: raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") -@click.command() -def _consolidate_yamls(yamlfile,experiment,platform,target,use): - ''' - Wrapper script for calling yaml_combine - allows the decorated version - of the function to be separate from the undecorated version - ''' - return consolidate_yamls(yamlfile,experiment,platform,target,use) - # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': consolidate_yamls() From 2ed50728092a4162270287eb248e8126b87140a9 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Tue, 11 Feb 2025 16:53:43 -0500 Subject: [PATCH 06/24] #346 Rename `combine_compile.py` and `combine_pp.py` --- fre/yamltools/combine_yamls_script.py | 10 +++++----- .../{combine_compile.py => compile_info_parser.py} | 0 fre/yamltools/{combine_pp.py => pp_info_parser.py} | 0 3 files changed, 5 insertions(+), 5 deletions(-) rename 
fre/yamltools/{combine_compile.py => compile_info_parser.py} (100%) rename fre/yamltools/{combine_pp.py => pp_info_parser.py} (100%) diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index 5e097243..5d1816d5 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -4,8 +4,8 @@ from pathlib import Path import click import yaml -import fre.yamltools.combine_compile as cc -import fre.yamltools.combine_pp as cp +import fre.yamltools.compile_info_parser as cip +import fre.yamltools.pp_info_parser as ppip import pprint def join_constructor(loader, node): @@ -54,7 +54,7 @@ def get_combined_ppyaml(comb): # Merge model into combined file (yaml_content, loaded_yaml) = comb.combine_model() except: - print("pp uh oh 1") + raise ValueError("pp uh oh 1") try: # Merge pp experiment yamls into combined file @@ -85,7 +85,7 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): combined yaml for compilation or post-processing """ if use == "compile": - combined = cc.init_compile_yaml(yamlfile, platform, target, join_constructor) + combined = cip.init_compile_yaml(yamlfile, platform, target, join_constructor) # Create combined compile yaml print("Combining yaml files into one dictionary: ") @@ -97,7 +97,7 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): print(f"COMBINE OUT HERE: {os.path.abspath(output)}") elif use =="pp": - combined = cp.init_pp_yaml(yamlfile, experiment, platform, target, join_constructor) + combined = ppip.init_pp_yaml(yamlfile, experiment, platform, target, join_constructor) # Create combined pp yaml print("Combining yaml files into one dictionary: ") diff --git a/fre/yamltools/combine_compile.py b/fre/yamltools/compile_info_parser.py similarity index 100% rename from fre/yamltools/combine_compile.py rename to fre/yamltools/compile_info_parser.py diff --git a/fre/yamltools/combine_pp.py b/fre/yamltools/pp_info_parser.py similarity index 100% rename from fre/yamltools/combine_pp.py rename to fre/yamltools/pp_info_parser.py From e425faad6e584d583c64bb8bf89901b7986e5e24 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 13 Feb 2025 10:35:33 -0500 Subject: [PATCH 07/24] #346 Keep old `combine-yamls` tool - Multiple scripts rely/use this tool - PR would be too big if I tried to fix each one with this reworked tool --- fre/yamltools/combine_yamls.py | 520 +++++++++++++++++++++++++++++++++ 1 file changed, 520 insertions(+) create mode 100755 fre/yamltools/combine_yamls.py diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py new file mode 100755 index 00000000..c31a2fb8 --- /dev/null +++ b/fre/yamltools/combine_yamls.py @@ -0,0 +1,520 @@ +""" +Script combines the model yaml with the compile, platform, and experiment yamls. +""" + +## TO-DO: +# - figure out way to safe_load (yaml_loader=yaml.SafeLoader?) +# - condition where there are multiple pp and analysis yamls + +import os +import shutil + +from pathlib import Path +import click +import yaml + +def join_constructor(loader, node): + """ + Allows FRE properties defined + in main yaml to be concatenated. 
+ """ + seq = loader.construct_sequence(node) + return ''.join([str(i) for i in seq]) + +def yaml_load(yamlfile): + """ + Load the yamlfile + """ + with open(yamlfile, 'r') as yf: + y = yaml.load(yf,Loader=yaml.Loader) + + return y + +def get_compile_paths(mainyaml_dir,comb): + """ + Extract compile and platform paths from model yaml + """ + comb_model=yaml_load(comb) + + # set platform yaml filepath + if comb_model["build"]["platformYaml"] is not None: + if Path(os.path.join(mainyaml_dir,comb_model["build"]["platformYaml"])).exists(): + py=comb_model["build"]["platformYaml"] + py_path=Path(os.path.join(mainyaml_dir,py)) + else: + raise ValueError("Incorrect platform yaml path given; does not exist.") + else: + py_path=None + raise ValueError("No platform yaml path given!") + + # set compile yaml filepath + if comb_model["build"]["compileYaml"] is not None: + if Path(os.path.join(mainyaml_dir,comb_model["build"]["compileYaml"])).exists(): + cy=comb_model["build"]["compileYaml"] + cy_path=Path(os.path.join(mainyaml_dir,cy)) + else: + raise ValueError("Incorrect compile yaml path given; does not exist.") + else: + cy_path=None + raise ValueError("No compile yaml path given!") + + return (py_path,cy_path) + +def experiment_check(mainyaml_dir,comb,experiment): + """ + Check that the experiment given is an experiment listed in the model yaml. + Extract experiment specific information and file paths. + Arguments: + mainyaml_dir : model yaml file + comb : combined yaml file name + experiment : experiment name + """ + comb_model=yaml_load(comb) + + # Check if exp name given is actually valid experiment listed in combined yaml + exp_list = [] + for i in comb_model.get("experiments"): + exp_list.append(i.get("name")) + + if experiment not in exp_list: + raise NameError(f"{experiment} is not in the list of experiments") + + # Extract compile yaml path for exp. 
provided + # if experiment matches name in list of experiments in yaml, extract file path + for i in comb_model.get("experiments"): + if experiment == i.get("name"): + expyaml=i.get("pp") + analysisyaml=i.get("analysis") + + if expyaml is not None: + ey_path=[] + for e in expyaml: + if Path(os.path.join(mainyaml_dir,e)).exists(): + ey=Path(os.path.join(mainyaml_dir,e)) + ey_path.append(ey) + else: + raise ValueError(f"Experiment yaml path given ({e}) does not exist.") + else: + raise ValueError("No experiment yaml path given!") + + if analysisyaml is not None: + ay_path=[] + for a in analysisyaml: + # prepend the directory containing the yaml + if Path(os.path.join(mainyaml_dir, a)).exists(): + ay=Path(os.path.join(mainyaml_dir,a)) + ay_path.append(ay) + else: + raise ValueError("Incorrect analysis yaml path given; does not exist.") + else: + ay_path=None + + return (ey_path,ay_path) + +########################################################################################### +## COMPILE CLASS ## +class init_compile_yaml(): + """ class holding routines for initalizing compilation yamls """ + def __init__(self,yamlfile,platform,target): + """ + Process to combine yamls applicable to compilation + """ + self.yml = yamlfile + self.name = yamlfile.split(".")[0] + self.namenopath = self.name.split("/")[-1].split(".")[0] + self.platform = platform + self.target = target + + # Register tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + base_name=f"combined-{self.namenopath}.yaml" + if len(self.mainyaml_dir) == 0: + self.combined = base_name + else: + self.combined = f"{self.mainyaml_dir}/{base_name}" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + try: + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + except Exception as exc: + raise FileNotFoundError(f'{self.yml} not found') from exc + print(f" model yaml: {self.yml}") + + def combine_compile(self): + """ + Combine compile yaml with the defined combined.yaml + """ + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) + + # copy compile yaml info into combined yaml + if cy_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(cy_path,'r',encoding='UTF-8') as f2: + f1.write("\n### COMPILE INFO ###\n") + shutil.copyfileobj(f2,f1) + print(f" compile yaml: {cy_path}") + + def combine_platforms(self): + """ + Combine platforms yaml with the defined combined.yaml + """ + # Get compile info + (py_path,cy_path) = get_compile_paths(self.mainyaml_dir,self.combined) + + # combine platform yaml + if py_path is not None: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(py_path,'r',encoding='UTF-8') as f2: + f1.write("\n### PLATFORM INFO ###\n") + shutil.copyfileobj(f2,f1) + print(f" platforms yaml: {py_path}") + + def clean_yaml(self): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. 
+ """ + # Load the fully combined yaml + full_yaml = yaml_load(self.combined) + + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_yaml.keys(): + del full_yaml[kc] + + with open(self.combined,'w',encoding='UTF-8') as f: + yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) + + print(f"Combined yaml located here: {os.path.abspath(self.combined)}") + return self.combined + +########################################################################################### +## PP CLASS ## +class init_pp_yaml(): + """ class holding routines for initalizing post-processing yamls """ + def __init__(self,yamlfile,experiment,platform,target): + """ + Process to combine the applicable yamls for post-processing + """ + self.yml = yamlfile + self.name = experiment + self.platform = platform + self.target = target + + # Regsiter tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + self.mainyaml_dir = os.path.dirname(self.yml) + + # Name of the combined yaml + self.combined=f"combined-{self.name}.yaml" + + print("Combining yaml files: ") + + def combine_model(self): + """ + Create the combined.yaml and merge it with the model yaml + """ + # copy model yaml info into combined yaml + with open(self.combined,'w+',encoding='UTF-8') as f1: + f1.write(f'name: &name "{self.name}"\n') + f1.write(f'platform: &platform "{self.platform}"\n') + f1.write(f'target: &target "{self.target}"\n\n') + try: + with open(self.yml,'r',encoding='UTF-8') as f2: + f1.write("### MODEL YAML SETTINGS ###\n") + shutil.copyfileobj(f2,f1) + except Exception as exc: + raise FileNotFoundError(f'{self.yml} not found') from exc + print(f" model yaml: {self.yml}") + + def combine_experiment(self): + """ + Combine experiment yamls with the defined combined.yaml. + If more than 1 pp yaml defined, return a list of paths. + """ + # Experiment Check + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + + ## COMBINE EXPERIMENT YAML INFO + # If only 1 pp yaml defined, combine with model yaml + if ey_path is not None and len(ey_path) == 1: + #expyaml_path = os.path.join(mainyaml_dir, i) + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(ey_path[0],'r',encoding='UTF-8') as f2: + #copy expyaml into combined + shutil.copyfileobj(f2,f1) + print(f" experiment yaml: {ey_path[0]}") + + # If more than 1 pp yaml listed, create an intermediate yaml folder to combine + # each model and pp yaml into own combined yaml file + # (Must be done for aliases defined) + elif ey_path is not None and len(ey_path) > 1: + pp_yamls = [] + for i in ey_path: + pp_exp = str(i).rsplit('/', maxsplit=1)[-1] + + #create yamlfiles in folder + cwd=os.getcwd() + tmp_yaml_folder = os.path.join(cwd,"model_x_exp_yamls") + os.makedirs(tmp_yaml_folder, exist_ok=True) + shutil.copy(self.combined, os.path.join(tmp_yaml_folder,f"combined-{pp_exp}")) + with open(os.path.join(tmp_yaml_folder,f"combined-{pp_exp}"),'a', + encoding='UTF-8') as f1: + with open(i,'r',encoding='UTF-8') as f2: + #copy expyaml into combined + shutil.copyfileobj(f2,f1) + pp_yamls.append(os.path.join(tmp_yaml_folder,f"combined-{pp_exp}")) + + return pp_yamls + + def combine_analysis(self): + """ + Combine analysis yamls with the defined combined.yaml + If more than 1 analysis yaml defined, return a list of paths. 
+ """ + # Experiment Check + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.combined,self.name) + + ## COMBINE ANALYSIS YAML INFO + # If only 1 analysis yaml listed, combine with model yaml + if ay_path is not None and len(ay_path) == 1: + with open(self.combined,'a',encoding='UTF-8') as f1: + with open(ay_path[0],'r',encoding='UTF-8') as f2: + #copy expyaml into combined + shutil.copyfileobj(f2,f1) + + # If more than 1 analysis yaml listed, create an intermediate yaml folder to combine + # each model and analysis yaml into own combined yaml file + elif ay_path is not None and len(ay_path) > 1: + analysis_yamls=[] + for i in ay_path: + analysis = str(i).rsplit('/', maxsplit=1)[-1] + + #create yamlfiles in folder + cwd=os.getcwd() + tmp_yaml_folder = os.path.join(cwd,"model_x_analysis_yamls") + os.makedirs(tmp_yaml_folder, exist_ok=True) + + shutil.copy(self.combined, os.path.join(tmp_yaml_folder,f"combined-{analysis}")) + with open(os.path.join(tmp_yaml_folder,f"combined-{analysis}"),'a', + encoding='UTF-8') as f1: + with open(i,'r',encoding='UTF-8') as f2: + #copy expyaml into combined + shutil.copyfileobj(f2,f1) + + analysis_yamls.append(os.path.join(tmp_yaml_folder,f"combined-{analysis}")) + + return analysis_yamls + + def merge_multiple_yamls(self, pp_list, analysis_list): + """ + Merge separately combined post-processing and analysis + yamls into fully combined yaml (without overwriting). + """ + result = {} + + # If more than one post-processing yaml is listed, update + # dictionary with content from 1st yaml in list + # Looping through rest of yamls listed, compare key value pairs. + # If instance of key is a dictionary in both result and loaded + # yamlfile, update the key in result to + # include the loaded yaml file's value. + if pp_list is not None and len(pp_list) > 1: + result.update(yaml_load(pp_list[0])) + for i in pp_list[1:]: + yf = yaml_load(i) + for key in result: + if key in yf: + if isinstance(result[key],dict) and isinstance(yf[key],dict): + if key == "postprocess": + if 'components' in result['postprocess']: + result['postprocess']["components"] += yf['postprocess']["components"] + else: + result['postprocess']["components"] = yf['postprocess']["components"] + # If only one post-processing yaml listed, do nothing + # (already combined in 'combine_experiments' function) + elif pp_list is not None and len(pp_list) == 1: + pass + + # If more than one analysis yaml is listed, update dictionary with content from 1st yaml + # Looping through rest of yamls listed, compare key value pairs. + # If instance of key is a dictionary in both result and loaded yamlfile, update the key + # in result to include the loaded yaml file's value. 
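In other words, the merge never overwrites whole top-level sections: "postprocess" components from later yamls are appended to the list already in result, while "analysis" entries are combined with a dict union in which the copy already in result wins on any clashing key. A condensed sketch of that rule on two hand-written dictionaries standing in for loaded pp/analysis yamls (the dict | operator needs Python 3.9+):

    # hand-written stand-ins for two loaded combined yamls
    result = {'postprocess': {'components': [{'type': 'atmos_cmip'}]},
              'analysis': {'a1': {'script': 'one'}}}
    incoming = {'postprocess': {'components': [{'type': 'land'}]},
                'analysis': {'a1': {'script': 'ONE'}, 'a2': {'script': 'two'}}}

    # append component lists rather than replacing them
    result['postprocess']['components'] += incoming['postprocess']['components']
    # union the analysis sections; entries already in result take precedence
    result['analysis'] = incoming['analysis'] | result['analysis']

    print([c['type'] for c in result['postprocess']['components']])  # ['atmos_cmip', 'land']
    print(result['analysis']['a1'])                                   # {'script': 'one'}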
+ if analysis_list is not None and len(analysis_list) > 1: + result.update(yaml_load(analysis_list[0])) + for i in analysis_list[1:]: + yf = yaml_load(i) + for key in result: + if key in yf: + if isinstance(result[key],dict) and isinstance(yf[key],dict): + if key == "analysis": + result[key] = yf[key] | result[key] + # If only one analysis yaml listed, do nothing + # (already combined in 'combine_analysis' function) + elif analysis_list is not None and len(analysis_list) == 1: + pass + + # Dump the updated result dictionary back into the final combined yaml file + with open(self.combined,'w',encoding='UTF-8') as f: + yaml.safe_dump(result,f,default_flow_style=False,sort_keys=False) + if pp_list is not None: + for i in pp_list: + exp = str(i).rsplit('/', maxsplit=1)[-1] + print(f" experiment yaml: {exp}") + if analysis_list is not None: + for i in analysis_list: + analysis = str(i).rsplit('/', maxsplit=1)[-1] + print(f" analysis yaml: {analysis}") + + def remove_tmp_yamlfiles(self, exp_yamls, analysis_yamls): + """ + Clean up separately created model/pp experiment and + model/analysis yamls. They are used for final combined + yaml but not needed separately. + """ + # Remove intermediate model_x_exp_yamls folder if it is not empty + if exp_yamls is not None and Path(exp_yamls[0]).exists(): + shutil.rmtree(os.path.dirname(exp_yamls[0])) + # Remove intermediate model_x_analysis_yamls if not empty + if analysis_yamls is not None and Path(analysis_yamls[0]).exists(): + shutil.rmtree(os.path.dirname(analysis_yamls[0])) + + def clean_yaml(self): + """ + Clean the yaml; remove unnecessary sections in + final combined yaml. + """ + # Load the fully combined yaml + full_yaml = yaml_load(self.combined) + + # Clean the yaml + # If keys exists, delete: + keys_clean=["fre_properties", "shared", "experiments"] + for kc in keys_clean: + if kc in full_yaml.keys(): + del full_yaml[kc] + + # Dump cleaned dictionary back into combined yaml file + with open(self.combined,'w') as f: + yaml.safe_dump(full_yaml,f,default_flow_style=False,sort_keys=False) + + print(f"Combined yaml located here: {os.path.abspath(self.combined)}") + return self.combined + +########################################################################################### +## Functions to combine the yaml files ## +def get_combined_compileyaml(comb): + """ + Combine the model, compile, and platform yamls + Arguments: + comb : combined yaml object + """ + # Merge model into combined file + comb.combine_model() + # Merge compile.yaml into combined file + comb.combine_compile() + # Merge platforms.yaml into combined file + full_combined = comb.combine_platforms() + # Clean the yaml + full_combined = comb.clean_yaml() + + return full_combined + +def combined_compile_existcheck(combined,yml,platform,target): + """ + Checks for if combined compile yaml exists already. + If not, combine model, compile, and platform yamls. 
+ """ + cd = Path.cwd() + combined_path=os.path.join(cd,combined) + + # Combine model, compile, and platform yamls + # If fre yammltools combine-yamls tools was used, the combined yaml should exist + if Path(combined_path).exists(): + full_combined = combined_path + print("\nNOTE: Yamls previously merged.") + else: + comb = init_compile_yaml(yml,platform,target) + full_combined = get_combined_compileyaml(comb) + + return full_combined + +########################################################################################### +def get_combined_ppyaml(comb): + """ + Combine the model, experiment, and analysis yamls + Arguments: + comb : combined yaml object + """ + # Merge model into combined file + comb.combine_model() + # Merge pp experiment yamls into combined file + comb_pp = comb.combine_experiment() + # Merge analysis yamls, if defined, into combined file + comb_analysis = comb.combine_analysis() + # Merge model/pp and model/analysis yamls if more than 1 is defined + # (without overwriting the yaml) + comb.merge_multiple_yamls(comb_pp, comb_analysis) + # Remove separate combined pp yaml files + comb.remove_tmp_yamlfiles(comb_pp, comb_analysis) + # Clean the yaml + full_combined = comb.clean_yaml() + + return full_combined + +########################################################################################### +def consolidate_yamls(yamlfile,experiment,platform,target,use): + """ + Depending on `use` argument passed, either create the final + combined yaml for compilation or post-processing + """ + # Regsiter tag handler + yaml.add_constructor('!join', join_constructor) + + # Path to the main model yaml + mainyaml_dir = os.path.dirname(yamlfile) + + if use == "compile": + combined = init_compile_yaml(yamlfile, platform, target) + # Create combined compile yaml + get_combined_compileyaml(combined) + elif use =="pp": + combined = init_pp_yaml(yamlfile,experiment,platform,target) + # Create combined pp yaml + get_combined_ppyaml(combined) + else: + raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") + +@click.command() +def _consolidate_yamls(yamlfile,experiment,platform,target,use): + ''' + Wrapper script for calling yaml_combine - allows the decorated version + of the function to be separate from the undecorated version + ''' + return consolidate_yamls(yamlfile,experiment,platform,target,use) + +# Use parseyaml function to parse created edits.yaml +if __name__ == '__main__': + consolidate_yamls() From 12519a6fa1d13e07f38feb58ce37598d80bfbc97 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 13 Feb 2025 10:40:25 -0500 Subject: [PATCH 08/24] #346 Move print statements, return yaml dictionary instead --- fre/yamltools/combine_yamls_script.py | 10 ++++------ fre/yamltools/pp_info_parser.py | 9 ++++++--- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index 5d1816d5..d26ae942 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -75,8 +75,10 @@ def get_combined_ppyaml(comb): except: raise ValueError("uh oh 4") +# print(full_combined) # Clean the yaml cleaned_yaml = comb.clean_yaml(full_combined) +# print(cleaned_yaml) return cleaned_yaml def consolidate_yamls(yamlfile,experiment,platform,target,use,output): @@ -85,9 +87,7 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): combined yaml for compilation or post-processing """ if use == "compile": - combined = cip.init_compile_yaml(yamlfile, platform, 
target, join_constructor) - # Create combined compile yaml - print("Combining yaml files into one dictionary: ") + combined = cip.InitCompileYaml(yamlfile, platform, target, join_constructor) if output is None: get_combined_compileyaml(combined) @@ -97,9 +97,7 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): print(f"COMBINE OUT HERE: {os.path.abspath(output)}") elif use =="pp": - combined = ppip.init_pp_yaml(yamlfile, experiment, platform, target, join_constructor) - # Create combined pp yaml - print("Combining yaml files into one dictionary: ") + combined = ppip.InitPPYaml(yamlfile, experiment, platform, target, join_constructor) if output is None: get_combined_ppyaml(combined) diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index 4f3539dd..8453432d 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -55,7 +55,7 @@ def experiment_check(mainyaml_dir,experiment,loaded_yaml): return (ey_path,ay_path) ## PP CLASS ## -class init_pp_yaml(): +class InitPPYaml(): """ class holding routines for initalizing post-processing yamls """ def __init__(self,yamlfile,experiment,platform,target,join_constructor): """ @@ -72,6 +72,9 @@ def __init__(self,yamlfile,experiment,platform,target,join_constructor): # Path to the main model yaml self.mainyaml_dir = os.path.dirname(self.yml) + # Create combined pp yaml + print("Combining yaml files into one dictionary: ") + def combine_model(self): """ Create the combined.yaml and merge it with the model yaml @@ -249,5 +252,5 @@ def clean_yaml(self,yml_dict): del yml_dict[kc] # Dump cleaned dictionary back into combined yaml file - cleaned_yaml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) - return cleaned_yaml +# cleaned_yaml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) + return yml_dict From 703310c677662ee0e0d64215be617a094e3ea896 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 13 Feb 2025 10:45:46 -0500 Subject: [PATCH 09/24] #346 Use re-worked combine-yamls tool in `configure_script_yaml.py` --- fre/pp/configure_script_yaml.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index af4cc3dd..8e2a5c30 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -15,7 +15,8 @@ import yaml import metomi.rose.config -import fre.yamltools.combine_yamls as cy +import fre.yamltools.combine_yamls_script as cy +import fre.yamltools.pp_info_parser as ppip #################### def yaml_load(yamlfile): @@ -33,8 +34,8 @@ def validate_yaml(yamlfile): """ Using the schema.json file, the yaml format is validated. 
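Since the combined configuration now arrives as already-loaded yaml content rather than a path to a file on disk, the schema check can run directly on the dictionary. A stripped-down sketch of that check, with a toy schema standing in for the real fre_pp.json:

    from jsonschema import validate, ValidationError

    # placeholder schema and dictionary, not the real fre_pp.json or a real experiment
    schema = {'type': 'object',
              'properties': {'name': {'type': 'string'}},
              'required': ['name']}
    combined = {'name': 'c96L65_am5f7b12r1_amip'}

    try:
        validate(instance=combined, schema=schema)
        print('Combined yaml VALID')
    except ValidationError as err:
        raise ValueError('Combined yaml NOT VALID') from err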
""" - # Load the yaml - yml = yaml_load(yamlfile) +# # Load the yaml +# yml = yaml_load(yamlfile) schema_dir = Path(__file__).resolve().parents[1] schema_path = os.path.join(schema_dir, 'gfdl_msd_schemas', 'FRE', 'fre_pp.json') @@ -45,7 +46,7 @@ def validate_yaml(yamlfile): # Validate yaml # If the yaml is not valid, the schema validation will raise errors and exit try: - validate(instance=yml,schema=schema) + validate(instance=yamlfile,schema=schema) print("\nCombined yaml VALID \n") except: raise ValueError("\nCombined yaml NOT VALID.\n") @@ -165,27 +166,32 @@ def yaml_info(yamlfile = None, experiment = None, platform = None, target = None rose_suite,rose_regrid,rose_remap = rose_init(e,p,t) # Combine model, experiment, and analysis yamls - comb = cy.init_pp_yaml(yml,e,p,t) + join_func = cy.join_constructor + comb = ppip.InitPPYaml(yml,e,p,t,join_func) full_combined = cy.get_combined_ppyaml(comb) +# print(full_combined) +# quit() + # Validate yaml validate_yaml(full_combined) - # Load the combined yaml - comb_pp_yaml = yaml_load(full_combined) + +# # Load the combined yaml +# comb_pp_yaml = yaml_load(full_combined) ## PARSE COMBINED YAML TO CREATE CONFIGS # Set rose-suite items - set_rose_suite(comb_pp_yaml,rose_suite) + set_rose_suite(full_combined,rose_suite) ####comb_pp_yaml,rose_suite) # Set regrid and remap rose app items - set_rose_apps(comb_pp_yaml,rose_regrid,rose_remap) + set_rose_apps(full_combined,rose_regrid,rose_remap) ####comb_pp_yaml,rose_regrid,rose_remap) - # write output files + # Write output files print("Writing output files...") cylc_dir = os.path.join(os.path.expanduser("~/cylc-src"), f"{e}__{p}__{t}") - outfile = os.path.join(cylc_dir, f"{e}.yaml") - shutil.copyfile(full_combined, outfile) - print(" " + outfile) +# outfile = os.path.join(cylc_dir, f"{e}.yaml") +# shutil.copyfile(full_combined, outfile) +# print(" " + outfile) dumper = metomi.rose.config.ConfigDumper() outfile = os.path.join(cylc_dir, "rose-suite.conf") From df7686f544d1d08a829e13b5134027bf4fcc06a0 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 13 Feb 2025 11:55:35 -0500 Subject: [PATCH 10/24] #346 Remove test for combined yaml - this file wouldn't exist anymore --- fre/pp/tests/test_configure_script_yaml.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 24232710..7d5be885 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -44,8 +44,7 @@ def test_configure_script(): os.environ["HOME"] = old_home # Check for configuration creation and final combined yaml - assert all([ Path(f"{OUT_DIR}/{EXPERIMENT}.yaml").exists(), - Path(f"{OUT_DIR}/rose-suite.conf").exists(), + assert all([ Path(f"{OUT_DIR}/rose-suite.conf").exists(), Path(f"{OUT_DIR}/app/regrid-xy/rose-app.conf").exists(), Path(f"{OUT_DIR}/app/remap-pp-components/rose-app.conf").exists() ]) From c28814a86e737676e3f9105e4d798a9fecee5dfe Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 13 Feb 2025 11:58:07 -0500 Subject: [PATCH 11/24] #346 combined file would not exist --- fre/tests/test_fre_pp_cli.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/fre/tests/test_fre_pp_cli.py b/fre/tests/test_fre_pp_cli.py index 6249ae5d..04ac94b0 100644 --- a/fre/tests/test_fre_pp_cli.py +++ b/fre/tests/test_fre_pp_cli.py @@ -94,9 +94,7 @@ def test_cli_fre_pp_configure_yaml_fail1(): "-p", "BAR", "-t", "BAZ", "-y", "BOO" ] ) - assert all( [ result.exit_code == 1, - 
isinstance(result.exception, FileNotFoundError ) - ] ) + assert result.exit_code == 1 #-- fre pp install From 6ffb6fd3cc1c5d49bc3439917b5b31f2cd243900 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Fri, 14 Feb 2025 13:42:12 -0500 Subject: [PATCH 12/24] #346 Provide output option for pp tools - outputs a combined yaml file for other tools to use/reference --- fre/pp/configure_script_yaml.py | 27 +++++++++----------- fre/yamltools/combine_yamls_script.py | 36 +++++++++++++++++---------- fre/yamltools/pp_info_parser.py | 21 ++++++++-------- 3 files changed, 46 insertions(+), 38 deletions(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index 8e2a5c30..c8ce2126 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -166,32 +166,29 @@ def yaml_info(yamlfile = None, experiment = None, platform = None, target = None rose_suite,rose_regrid,rose_remap = rose_init(e,p,t) # Combine model, experiment, and analysis yamls - join_func = cy.join_constructor - comb = ppip.InitPPYaml(yml,e,p,t,join_func) - full_combined = cy.get_combined_ppyaml(comb) + cylc_dir = os.path.join(os.path.expanduser("~/cylc-src"), f"{e}__{p}__{t}") + outfile = os.path.join(cylc_dir, f"{e}.yaml") -# print(full_combined) -# quit() + full_yamldict = cy.consolidate_yamls(yamlfile = yml, + experiment = e, + platform = p, + target = t, + use = "pp", + output = outfile) # Validate yaml - validate_yaml(full_combined) - -# # Load the combined yaml -# comb_pp_yaml = yaml_load(full_combined) + validate_yaml(full_yamldict) ## PARSE COMBINED YAML TO CREATE CONFIGS # Set rose-suite items - set_rose_suite(full_combined,rose_suite) ####comb_pp_yaml,rose_suite) + set_rose_suite(full_yamldict,rose_suite) ####comb_pp_yaml,rose_suite) # Set regrid and remap rose app items - set_rose_apps(full_combined,rose_regrid,rose_remap) ####comb_pp_yaml,rose_regrid,rose_remap) + set_rose_apps(full_yamldict,rose_regrid,rose_remap) ####comb_pp_yaml,rose_regrid,rose_remap) # Write output files print("Writing output files...") - cylc_dir = os.path.join(os.path.expanduser("~/cylc-src"), f"{e}__{p}__{t}") -# outfile = os.path.join(cylc_dir, f"{e}.yaml") -# shutil.copyfile(full_combined, outfile) -# print(" " + outfile) + print(" " + outfile) dumper = metomi.rose.config.ConfigDumper() outfile = os.path.join(cylc_dir, "rose-suite.conf") diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index d26ae942..f498286d 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -16,6 +16,13 @@ def join_constructor(loader, node): seq = loader.construct_sequence(node) return ''.join([str(i) for i in seq]) +def output_yaml(cleaned_yaml,experiment,output): + """ + """ + filename = output + with open(filename,'w') as out: + out.write(yaml.dump(cleaned_yaml,default_flow_style=False,sort_keys=False)) + ## Functions to combine the yaml files ## def get_combined_compileyaml(comb): """ @@ -44,7 +51,7 @@ def get_combined_compileyaml(comb): cleaned_yaml = comb.clean_yaml(yaml_content) return cleaned_yaml -def get_combined_ppyaml(comb): +def get_combined_ppyaml(comb,experiment,output=None): """ Combine the model, experiment, and analysis yamls Arguments: @@ -71,14 +78,19 @@ def get_combined_ppyaml(comb): try: # Merge model/pp and model/analysis yamls if more than 1 is defined # (without overwriting the yaml) - full_combined = comb.merge_multiple_yamls(comb_pp_updated_list, comb_analysis_updated_list) + full_combined = 
comb.merge_multiple_yamls(comb_pp_updated_list, comb_analysis_updated_list,loaded_yaml) except: raise ValueError("uh oh 4") -# print(full_combined) # Clean the yaml cleaned_yaml = comb.clean_yaml(full_combined) -# print(cleaned_yaml) + + # OUTPUT IF NEEDED + if output is not None: + output_yaml(cleaned_yaml,experiment,output) + else: + print("Combined yaml information saved as dictionary") + return cleaned_yaml def consolidate_yamls(yamlfile,experiment,platform,target,use,output): @@ -89,25 +101,23 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): if use == "compile": combined = cip.InitCompileYaml(yamlfile, platform, target, join_constructor) - if output is None: + if output is False: get_combined_compileyaml(combined) else: - with open(output,'w') as out: - out.write(get_combined_compileyaml(combined)) - print(f"COMBINE OUT HERE: {os.path.abspath(output)}") + get_combined_compileyaml(combined,experiment,output) elif use =="pp": combined = ppip.InitPPYaml(yamlfile, experiment, platform, target, join_constructor) - if output is None: - get_combined_ppyaml(combined) + if output is False: + yml_dict = get_combined_ppyaml(combined) else: - with open(output,'w') as out: - out.write(get_combined_ppyaml(combined)) - print(f"COMBINE OUT HERE: {os.path.abspath(output)}") + yml_dict = get_combined_ppyaml(combined,experiment,output) + else: raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") + return yml_dict # Use parseyaml function to parse created edits.yaml if __name__ == '__main__': consolidate_yamls() diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index 8453432d..9eee0a96 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -174,11 +174,13 @@ def combine_analysis(self,yaml_content,loaded_yaml): return analysis_yamls - def merge_multiple_yamls(self, pp_list, analysis_list): + def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): """ Merge separately combined post-processing and analysis yamls into fully combined yaml (without overwriting like sections). 
""" + (ey_path,ay_path) = experiment_check(self.mainyaml_dir,self.name,loaded_yaml) + result = {} # If more than one post-processing yaml is listed, update @@ -191,7 +193,6 @@ def merge_multiple_yamls(self, pp_list, analysis_list): yml_pp = "".join(pp_list[0]) result.update(yaml.load(yml_pp,Loader=yaml.Loader)) #print(f" experiment yaml: {exp}") -# print(pp_list[0]) for i in pp_list[1:]: uhm = "".join(i) @@ -228,14 +229,14 @@ def merge_multiple_yamls(self, pp_list, analysis_list): elif analysis_list is not None and len(analysis_list) == 1: pass -# if pp_list is not None: -# for i in pp_list: -# exp = str(i).rsplit('/', maxsplit=1)[-1] -# print(f" experiment yaml: {exp}") -# if analysis_list is not None: -# for i in analysis_list: -# analysis = str(i).rsplit('/', maxsplit=1)[-1] -# print(f" analysis yaml: {analysis}") + if ey_path is not None: + for i in ey_path: + exp = str(i).rsplit('/', maxsplit=1)[-1] + print(f" experiment yaml: {exp}") + if ay_path is not None: + for i in ay_path: + analysis = str(i).rsplit('/', maxsplit=1)[-1] + print(f" analysis yaml: {analysis}") return result From 1e5aafacb2dde1ae1b7b5dbbf7e2dc8a6de914ca Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 12:49:58 -0500 Subject: [PATCH 13/24] #346 Add back test to check for existence of combined yaml --- fre/pp/tests/test_configure_script_yaml.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 7d5be885..6bcc6a85 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -44,7 +44,8 @@ def test_configure_script(): os.environ["HOME"] = old_home # Check for configuration creation and final combined yaml - assert all([ Path(f"{OUT_DIR}/rose-suite.conf").exists(), + assert all([ Path(f"{OUT_DIR}/{EXPERIMENT}.yaml"), + Path(f"{OUT_DIR}/rose-suite.conf").exists(), Path(f"{OUT_DIR}/app/regrid-xy/rose-app.conf").exists(), Path(f"{OUT_DIR}/app/remap-pp-components/rose-app.conf").exists() ]) From 1c6a681524ace11ee74d749103e5eb6baa540d1c Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 13:03:25 -0500 Subject: [PATCH 14/24] #346 Update scripts --- fre/pp/configure_script_yaml.py | 3 --- fre/pp/tests/test_configure_script_yaml.py | 2 +- fre/yamltools/combine_yamls_script.py | 2 ++ fre/yamltools/compile_info_parser.py | 17 +++++++++++------ fre/yamltools/pp_info_parser.py | 13 +++++-------- 5 files changed, 19 insertions(+), 18 deletions(-) diff --git a/fre/pp/configure_script_yaml.py b/fre/pp/configure_script_yaml.py index c8ce2126..75ce01dc 100644 --- a/fre/pp/configure_script_yaml.py +++ b/fre/pp/configure_script_yaml.py @@ -34,9 +34,6 @@ def validate_yaml(yamlfile): """ Using the schema.json file, the yaml format is validated. 
""" -# # Load the yaml -# yml = yaml_load(yamlfile) - schema_dir = Path(__file__).resolve().parents[1] schema_path = os.path.join(schema_dir, 'gfdl_msd_schemas', 'FRE', 'fre_pp.json') # Load the json schema: .load() (vs .loads()) reads and parses the json in one) diff --git a/fre/pp/tests/test_configure_script_yaml.py b/fre/pp/tests/test_configure_script_yaml.py index 6bcc6a85..24232710 100644 --- a/fre/pp/tests/test_configure_script_yaml.py +++ b/fre/pp/tests/test_configure_script_yaml.py @@ -44,7 +44,7 @@ def test_configure_script(): os.environ["HOME"] = old_home # Check for configuration creation and final combined yaml - assert all([ Path(f"{OUT_DIR}/{EXPERIMENT}.yaml"), + assert all([ Path(f"{OUT_DIR}/{EXPERIMENT}.yaml").exists(), Path(f"{OUT_DIR}/rose-suite.conf").exists(), Path(f"{OUT_DIR}/app/regrid-xy/rose-app.conf").exists(), Path(f"{OUT_DIR}/app/remap-pp-components/rose-app.conf").exists() ]) diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index f498286d..5f34f972 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -18,6 +18,8 @@ def join_constructor(loader, node): def output_yaml(cleaned_yaml,experiment,output): """ + Write out the combined yaml dictionary info + to a file if --output is specified """ filename = output with open(filename,'w') as out: diff --git a/fre/yamltools/compile_info_parser.py b/fre/yamltools/compile_info_parser.py index 3fc2f88e..fd80cd52 100644 --- a/fre/yamltools/compile_info_parser.py +++ b/fre/yamltools/compile_info_parser.py @@ -3,6 +3,8 @@ def get_compile_paths(full_path,loaded_yml): """ + Find and return the paths for the compile + and platform yamls """ for key,value in loaded_yml.items(): if key == "build": @@ -12,7 +14,7 @@ def get_compile_paths(full_path,loaded_yml): return (py_path, cy_path) ## COMPILE CLASS ## -class init_compile_yaml(): +class InitCompileYaml(): """ class holding routines for initalizing compilation yamls """ def __init__(self,yamlfile,platform,target,join_constructor): """ @@ -30,6 +32,9 @@ def __init__(self,yamlfile,platform,target,join_constructor): # Path to the main model yaml self.mainyaml_dir = os.path.dirname(self.yml) + # Create combined compile yaml + print("Combining yaml files into one dictionary: ") + def combine_model(self): """ Create the combined.yaml and merge it with the model yaml @@ -107,15 +112,15 @@ def clean_yaml(self, yaml_content): final combined yaml. 
""" # Load the yaml - yml=yaml.load(yaml_content, Loader=yaml.Loader) + yml_dict=yaml.load(yaml_content, Loader=yaml.Loader) # Clean the yaml # If keys exists, delete: keys_clean=["fre_properties", "shared", "experiments"] for kc in keys_clean: - if kc in yml.keys(): - del yml[kc] + if kc in yml_dict.keys(): + del yml_dict[kc] - cleaned_yml = yaml.safe_dump(yml,default_flow_style=False,sort_keys=False) + cleaned_yml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) - return cleaned_yml + return cleaned_yml #yml_dict #either return dictionary OR string - string works for fremake but dictionary works for pp and list diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index 9eee0a96..68579389 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -12,8 +12,6 @@ def experiment_check(mainyaml_dir,experiment,loaded_yaml): comb : combined yaml file name experiment : experiment name """ -# comb_model=yaml_load(comb) -# # Check if exp name given is actually valid experiment listed in combined yaml exp_list = [] for i in loaded_yaml.get("experiments"): @@ -195,8 +193,8 @@ def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): #print(f" experiment yaml: {exp}") for i in pp_list[1:]: - uhm = "".join(i) - yf = yaml.load(uhm,Loader=yaml.Loader) + pp_list_to_string_concat = "".join(i) + yf = yaml.load(pp_list_to_string_concat,Loader=yaml.Loader) for key in result: if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): @@ -216,9 +214,8 @@ def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): result.update(yaml.load(yml_analysis,Loader=yaml.Loader)) for i in analysis_list[1:]: - #more_new4 = "".join(i) - uhm_again = "".join(i) - yf = yaml.load(uhm_again,Loader=yaml.Loader) + analysis_list_to_string_concat = "".join(i) + yf = yaml.load(analysis_list_to_string_concat,Loader=yaml.Loader) for key in result: if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): @@ -253,5 +250,5 @@ def clean_yaml(self,yml_dict): del yml_dict[kc] # Dump cleaned dictionary back into combined yaml file -# cleaned_yaml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) + #cleaned_yaml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) return yml_dict From ce7c8f169fd6fc36f050393d3bc5774ea5c88ad7 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 13:05:27 -0500 Subject: [PATCH 15/24] #346 Note original `combine_yamls.py` kept - other tools also need to be changed with these PR changes so this older script is kept to not break multiple tools --- fre/yamltools/combine_yamls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fre/yamltools/combine_yamls.py b/fre/yamltools/combine_yamls.py index c31a2fb8..8f1df3f3 100755 --- a/fre/yamltools/combine_yamls.py +++ b/fre/yamltools/combine_yamls.py @@ -1,5 +1,5 @@ """ -Script combines the model yaml with the compile, platform, and experiment yamls. +OLDER script that combines the model yaml with the compile, platform, and experiment yamls. 
""" ## TO-DO: From 8639fa1c9bc043e6c4ff5cb7225321fa4a4cbadc Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 14:16:47 -0500 Subject: [PATCH 16/24] #346 Add test file for combine_yamls_script.py --- fre/yamltools/pp_info_parser.py | 5 +- .../tests/test_combine_yamls_script.py | 319 ++++++++++++++++++ 2 files changed, 323 insertions(+), 1 deletion(-) create mode 100644 fre/yamltools/tests/test_combine_yamls_script.py diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index 68579389..e6a5dba6 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -199,7 +199,10 @@ def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): if key == "postprocess": - result[key]["components"] = yf[key]["components"] + result[key]["components"] + if 'components' in result['postprocess']: + result['postprocess']["components"] += yf['postprocess']["components"] + result[key]["components"] + else: + result['postprocess']["components"] = yf['postprocess']["components"] # If only one post-processing yaml listed, do nothing # (already combined in 'combine_experiments' function) elif pp_list is not None and len(pp_list) == 1: diff --git a/fre/yamltools/tests/test_combine_yamls_script.py b/fre/yamltools/tests/test_combine_yamls_script.py new file mode 100644 index 00000000..1b099fbc --- /dev/null +++ b/fre/yamltools/tests/test_combine_yamls_script.py @@ -0,0 +1,319 @@ +""" +tests routines in fre.yamltools.combine_yamls +""" +import os +from pathlib import Path +import pytest +import shutil +import json +import yaml +import pprint +from jsonschema import validate +from fre.yamltools import combine_yamls as cy_original +from fre.yamltools import combine_yamls_script as cy + + +## SET-UP +# Set example yaml paths, input directory, output directory +#CWD = Path.cwd() +TEST_DIR = Path("fre/yamltools/tests") +IN_DIR = Path(f"{TEST_DIR}/AM5_example") +SCHEMA_DIR = Path("fre/gfdl_msd_schemas/FRE") + +# Create output directories +COMP_OUT_DIR = Path(f"{TEST_DIR}/combine_yamls_out/compile") +PP_OUT_DIR = Path(f"{TEST_DIR}/combine_yamls_out/pp") + +# If output directory exists, remove and create again +for out in [COMP_OUT_DIR, PP_OUT_DIR]: + if out.exists(): + shutil.rmtree(out) + Path(out).mkdir(parents=True,exist_ok=True) + else: + Path(out).mkdir(parents=True,exist_ok=True) + +## Set what would be click options +# Compile +COMP_EXPERIMENT = "am5" +COMP_PLATFORM = "ncrc5.intel23" +COMP_TARGET = "prod" + +# Post-processing +PP_EXPERIMENT = "c96L65_am5f7b12r1_amip" +PP_PLATFORM = "gfdl.ncrc5-intel22-classic" +PP_TARGET = "prod" + +def test_modelyaml_exists(): + """ + Make sure main yaml file exists + """ + assert Path(f"{IN_DIR}/am5.yaml").exists() + +def test_compileyaml_exists(): + """ + Make sure experiment yaml file exists + """ + assert Path(f"{IN_DIR}/compile_yamls/compile.yaml").exists() + +def test_platformyaml_exists(): + """ + Make sure experiment yaml file exists + """ + assert Path(f"{IN_DIR}/compile_yamls/platforms.yaml").exists() + +def test_merged_compile_yamls(): + """ + Check for the creation of the combined-[experiment] yaml + Check that the model yaml was merged into the combined yaml + """ + # Model yaml path + modelyaml = str(Path(f"{IN_DIR}/am5.yaml")) + use = "compile" + + # Merge the yamls + cy_original.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) + + # Move combined yaml to output location + 
shutil.move(f"{IN_DIR}/combined-am5.yaml", COMP_OUT_DIR) + + # Check that the combined yaml exists + assert Path(f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml").exists() + +def test_combined_compileyaml_validation(): + """ + Validate the combined compile yaml + """ + combined_yamlfile =f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml" + schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") + + with open(combined_yamlfile,'r') as cf: + yml = yaml.safe_load(cf) + + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) + + # If the yaml is valid, no issues + # If the yaml is not valid, error + try: + validate(instance=yml,schema=schema) + except: + assert False + +def test_combined_compileyaml_combinefail(): + """ + Check to test if compile yaml is incorrect/does not exist, + the combine fails. (compile yaml path misspelled) + """ + # Model yaml path + modelyaml = str(Path(f"{IN_DIR}/compile_yamls/compile_fail/am5-wrong_compilefile.yaml")) + use = "compile" + + # Merge the yamls - should fail since there is no compile yaml specified in the model yaml + try: + cy_original.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) + # Move combined yaml to output location + shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) + except: + print("EXPECTED FAILURE") + # Move combined yaml to output location + shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) + assert True + +def test_combined_compileyaml_validatefail(): + """ + Check if the schema is validating correctly + Branch should be string + """ + # Model yaml path + modelyaml = str(Path(f"{IN_DIR}/compile_yamls/compile_fail/am5-wrong_datatype.yaml")) + use = "compile" + + # Merge the yamls + cy_original.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) + + # Move combined yaml to output location + shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_datatype.yaml", COMP_OUT_DIR) + + # Validate against schema; should fail + wrong_combined = Path(f"{COMP_OUT_DIR}/combined-am5-wrong_datatype.yaml") + schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") + + # Open/load combined yaml file + with open(wrong_combined,'r') as cf: + yml = yaml.safe_load(cf) + + # Open/load schema.jaon + with open(schema_file,'r') as f: + s = f.read() + schema = json.loads(s) + + # Validation should fail + try: + validate(instance=yml,schema=schema) + except: + assert True + +############ PP ############ +def test_expyaml_exists(): + """ + Make sure experiment yaml file exists + """ + assert Path(f"{IN_DIR}/pp_yamls/pp.c96_amip.yaml").exists() + +@pytest.mark.skip(reason='analysis scripts might not be defined yet') +def test_analysisyaml_exists(): + """ + Make sure experiment yaml file exists + """ + assert Path(f"{IN_DIR}/pp_yamls/analysis.yaml").exists() + +def test_merged_pp_yamls(): + """ + Check for the creation of the combined-[experiment] yaml + Check that the model yaml was merged into the combined yaml + """ + # Model yaml path + modelyaml = Path(f"{IN_DIR}/am5.yaml") + use = "pp" + + # Merge the yamls + try: + cy.consolidate_yamls(modelyaml, PP_EXPERIMENT, PP_PLATFORM, PP_TARGET, use, output=None) + except: + assert False + +def test_combined_ppyaml_validation(): + """ + Validate the combined compile yaml + """ + modelyaml = Path(f"{IN_DIR}/am5.yaml") + use = 'pp' + + # Merge the yamls + try: + out = cy.consolidate_yamls(modelyaml, PP_EXPERIMENT, PP_PLATFORM, 
PP_TARGET, use, output=None) + except: + assert False + + schema_dir = Path(__file__).resolve().parents[2] + schema_path = os.path.join(schema_dir, 'gfdl_msd_schemas', 'FRE', 'fre_pp.json') + with open(schema_path,'r') as f: + s = f.read() + schema = json.loads(s) + + validate(instance=out,schema=schema) + +def test_combine_pp_yamls(tmp_path): + """ + Verify yaml combiner functionality by combining + a model yaml with 3 pp yamls (2 with components). + """ + + model = { + 'experiments' : [ + { + 'name' : 'expname', + 'pp' : [ + 'pp1.yaml', + 'pp2.yaml', + 'pp3.yaml' + ] + } + ] + } + + pp1 = { + 'directories' : { + 'history_dir': 'one', + 'pp_dir' : 'two' + }, + 'postprocess' : { + 'settings' : { + 'history_segment' : 'three', + 'pp_start' : 'four' + } + } + } + + pp2 = { + 'postprocess' : { + 'components' : [ + { + 'type' : 'atmos_cmip', + 'sources' : "foo bar" }, + { + 'type' : 'land', + 'sources' : "land_month"} + ] + } + } + + pp3 = { + 'postprocess' : { + 'components' : [ + { + 'type' : 'ocean', + 'sources' : "a b c" }, + { + 'type' : 'ice', + 'sources' : "ice_month"} + ] + } + } + + combined = { + 'name' : 'expname', + 'platform' : 'platform', + 'target' : 'target', + 'directories' : { + 'history_dir': 'one', + 'pp_dir' : 'two' + }, + 'postprocess' : { + 'settings' : { + 'history_segment' : 'three', + 'pp_start' : 'four' + }, + 'components' : [ + { + 'type' : 'atmos_cmip', + 'sources' : "foo bar" }, + { + 'type' : 'land', + 'sources' : "land_month"}, + { + 'type' : 'ocean', + 'sources' : "a b c" }, + { + 'type' : 'ice', + 'sources' : "ice_month"} + ] + } + } + + # create temp directory + tmp_path.mkdir(exist_ok=True) + + # write model and pp yamls + file_model = open(tmp_path / 'model.yaml', 'w') + file_pp1 = open(tmp_path / 'pp1.yaml', 'w') + file_pp2 = open(tmp_path / 'pp2.yaml', 'w') + file_pp3 = open(tmp_path / 'pp3.yaml', 'w') + + yaml.dump(model, file_model) + yaml.dump(pp1, file_pp1) + yaml.dump(pp2, file_pp2) + yaml.dump(pp3, file_pp3) + + # combine the yamls + # output is a combined dictionary of necessary yaml info + output = cy.consolidate_yamls(tmp_path / 'model.yaml', 'expname', 'platform', 'target', 'pp', output=None) + pp = pprint.PrettyPrinter(indent=4) + pp.pprint(output) + + assert output == combined + +## TO-DO: +# - add tests for if output option is defined +# - fix fre make code for combined yaml From c44e93cb3ee4fc6556dd06d60a4b934bdfceb5e4 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 14:35:17 -0500 Subject: [PATCH 17/24] #346 Fix output option for combining compile yamls --- fre/yamltools/combine_yamls_script.py | 15 ++++++++++++--- fre/yamltools/compile_info_parser.py | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index 5f34f972..10480a21 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -26,7 +26,7 @@ def output_yaml(cleaned_yaml,experiment,output): out.write(yaml.dump(cleaned_yaml,default_flow_style=False,sort_keys=False)) ## Functions to combine the yaml files ## -def get_combined_compileyaml(comb): +def get_combined_compileyaml(comb,output=None): """ Combine the model, compile, and platform yamls Arguments: @@ -51,6 +51,13 @@ def get_combined_compileyaml(comb): # Clean the yaml cleaned_yaml = comb.clean_yaml(yaml_content) + + # OUTPUT IF NEEDED + if output is not None: + output_yaml(cleaned_yaml,experiment=None,output=output) + else: + print("Combined yaml information saved as dictionary") + 
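Several of these changes toggle between handing back the cleaned result as dumped text and as a dictionary (the trailing comment in compile_info_parser.py spells out why: the string form suits fremake, the dictionary form suits the pp and list tools). The distinction is only whether yaml.safe_dump is called without a stream, as in this small sketch with a placeholder dictionary:

    import yaml

    yml_dict = {'name': 'example', 'platform': 'example.platform'}   # placeholder content
    cleaned_yml = yaml.safe_dump(yml_dict, default_flow_style=False, sort_keys=False)

    print(type(cleaned_yml).__name__)   # str: the dumped text form
    print(type(yml_dict).__name__)      # dict: the in-memory form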
return cleaned_yaml def get_combined_ppyaml(comb,experiment,output=None): @@ -104,9 +111,10 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): combined = cip.InitCompileYaml(yamlfile, platform, target, join_constructor) if output is False: - get_combined_compileyaml(combined) + yml_dict = get_combined_compileyaml(combined) else: - get_combined_compileyaml(combined,experiment,output) + yml_dict = get_combined_compileyaml(combined,output) + print(f"Combined yaml file located here: {os.getcwd()}/{output}") elif use =="pp": combined = ppip.InitPPYaml(yamlfile, experiment, platform, target, join_constructor) @@ -115,6 +123,7 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): yml_dict = get_combined_ppyaml(combined) else: yml_dict = get_combined_ppyaml(combined,experiment,output) + print(f"Combined yaml file located here: {os.getcwd()}/{output}") else: raise ValueError("'use' value is not valid; must be 'compile' or 'pp'") diff --git a/fre/yamltools/compile_info_parser.py b/fre/yamltools/compile_info_parser.py index fd80cd52..b599084e 100644 --- a/fre/yamltools/compile_info_parser.py +++ b/fre/yamltools/compile_info_parser.py @@ -123,4 +123,4 @@ def clean_yaml(self, yaml_content): cleaned_yml = yaml.safe_dump(yml_dict,default_flow_style=False,sort_keys=False) - return cleaned_yml #yml_dict #either return dictionary OR string - string works for fremake but dictionary works for pp and list + return yml_dict #either return dictionary OR string (cleaned_yml) - string works for fremake but dictionary works for pp and list From 0a126adc4cbd96b3a889166948cfed6cbbedbcc9 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 15:04:45 -0500 Subject: [PATCH 18/24] #346 Fix tests for combined compile operations --- .../tests/test_combine_yamls_script.py | 53 ++++++++----------- 1 file changed, 22 insertions(+), 31 deletions(-) diff --git a/fre/yamltools/tests/test_combine_yamls_script.py b/fre/yamltools/tests/test_combine_yamls_script.py index 1b099fbc..2c629c7a 100644 --- a/fre/yamltools/tests/test_combine_yamls_script.py +++ b/fre/yamltools/tests/test_combine_yamls_script.py @@ -9,7 +9,6 @@ import yaml import pprint from jsonschema import validate -from fre.yamltools import combine_yamls as cy_original from fre.yamltools import combine_yamls_script as cy @@ -71,24 +70,26 @@ def test_merged_compile_yamls(): use = "compile" # Merge the yamls - cy_original.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) - - # Move combined yaml to output location - shutil.move(f"{IN_DIR}/combined-am5.yaml", COMP_OUT_DIR) - - # Check that the combined yaml exists - assert Path(f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml").exists() + try: + cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use, output = None) + except: + assert False def test_combined_compileyaml_validation(): """ Validate the combined compile yaml """ - combined_yamlfile =f"{COMP_OUT_DIR}/combined-{COMP_EXPERIMENT}.yaml" - schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") + # Model yaml path + modelyaml = str(Path(f"{IN_DIR}/am5.yaml")) + use = "compile" - with open(combined_yamlfile,'r') as cf: - yml = yaml.safe_load(cf) + # Merge the yamls + try: + out = cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use, output = None) + except: + assert False + schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") with open(schema_file,'r') as f: s = f.read() schema = json.loads(s) @@ -96,7 +97,7 @@ def 
test_combined_compileyaml_validation(): # If the yaml is valid, no issues # If the yaml is not valid, error try: - validate(instance=yml,schema=schema) + validate(instance=out,schema=schema) except: assert False @@ -111,13 +112,9 @@ def test_combined_compileyaml_combinefail(): # Merge the yamls - should fail since there is no compile yaml specified in the model yaml try: - cy_original.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) - # Move combined yaml to output location - shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) + out = cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use, output = None) except: print("EXPECTED FAILURE") - # Move combined yaml to output location - shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_compilefile.yaml", COMP_OUT_DIR) assert True def test_combined_compileyaml_validatefail(): @@ -130,19 +127,14 @@ def test_combined_compileyaml_validatefail(): use = "compile" # Merge the yamls - cy_original.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use) - - # Move combined yaml to output location - shutil.move(f"{IN_DIR}/compile_yamls/compile_fail/combined-am5-wrong_datatype.yaml", COMP_OUT_DIR) + try: + out = cy.consolidate_yamls(modelyaml, COMP_EXPERIMENT, COMP_PLATFORM, COMP_TARGET, use, output = None) + except: + assert False # Validate against schema; should fail - wrong_combined = Path(f"{COMP_OUT_DIR}/combined-am5-wrong_datatype.yaml") schema_file = os.path.join(SCHEMA_DIR, "fre_make.json") - # Open/load combined yaml file - with open(wrong_combined,'r') as cf: - yml = yaml.safe_load(cf) - # Open/load schema.jaon with open(schema_file,'r') as f: s = f.read() @@ -150,7 +142,7 @@ def test_combined_compileyaml_validatefail(): # Validation should fail try: - validate(instance=yml,schema=schema) + validate(instance=out,schema=schema) except: assert True @@ -196,9 +188,8 @@ def test_combined_ppyaml_validation(): except: assert False - schema_dir = Path(__file__).resolve().parents[2] - schema_path = os.path.join(schema_dir, 'gfdl_msd_schemas', 'FRE', 'fre_pp.json') - with open(schema_path,'r') as f: + schema_file = os.path.join(SCHEMA_DIR, "fre_pp.json") + with open(schema_file,'r') as f: s = f.read() schema = json.loads(s) From 9e734666c8942505ed548f8c483791cfc180dfa7 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Wed, 19 Feb 2025 15:05:39 -0500 Subject: [PATCH 19/24] #346 Update comment --- fre/yamltools/tests/test_combine_yamls_script.py | 1 - 1 file changed, 1 deletion(-) diff --git a/fre/yamltools/tests/test_combine_yamls_script.py b/fre/yamltools/tests/test_combine_yamls_script.py index 2c629c7a..efd5d8ab 100644 --- a/fre/yamltools/tests/test_combine_yamls_script.py +++ b/fre/yamltools/tests/test_combine_yamls_script.py @@ -307,4 +307,3 @@ def test_combine_pp_yamls(tmp_path): ## TO-DO: # - add tests for if output option is defined -# - fix fre make code for combined yaml From fbea26328626e4d72c4ab6a2abaa454e6215b1f0 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 20 Feb 2025 15:20:28 -0500 Subject: [PATCH 20/24] #346 Try to get rid of some nested if statements --- fre/yamltools/combine_yamls_script.py | 6 +++--- fre/yamltools/pp_info_parser.py | 23 ++++++++++++----------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index 10480a21..569531a2 100755 --- a/fre/yamltools/combine_yamls_script.py +++ 
b/fre/yamltools/combine_yamls_script.py @@ -110,7 +110,7 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): if use == "compile": combined = cip.InitCompileYaml(yamlfile, platform, target, join_constructor) - if output is False: + if output is None : yml_dict = get_combined_compileyaml(combined) else: yml_dict = get_combined_compileyaml(combined,output) @@ -119,8 +119,8 @@ def consolidate_yamls(yamlfile,experiment,platform,target,use,output): elif use =="pp": combined = ppip.InitPPYaml(yamlfile, experiment, platform, target, join_constructor) - if output is False: - yml_dict = get_combined_ppyaml(combined) + if output is None: + yml_dict = get_combined_ppyaml(combined,experiment) else: yml_dict = get_combined_ppyaml(combined,experiment,output) print(f"Combined yaml file located here: {os.getcwd()}/{output}") diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index e6a5dba6..bade5428 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -27,17 +27,18 @@ def experiment_check(mainyaml_dir,experiment,loaded_yaml): expyaml=i.get("pp") analysisyaml=i.get("analysis") - if expyaml is not None: - ey_path=[] - for e in expyaml: - if Path(os.path.join(mainyaml_dir,e)).exists(): - ey=Path(os.path.join(mainyaml_dir,e)) - ey_path.append(ey) - else: - raise ValueError(f"Experiment yaml path given ({e}) does not exist.") - else: + if expyaml is None: raise ValueError("No experiment yaml path given!") + ey_path=[] + for e in expyaml: + if not Path(os.path.join(mainyaml_dir,e)).exists(): + raise ValueError(f"Experiment yaml path given ({e}) does not exist.") + + ey=Path(os.path.join(mainyaml_dir,e)) + ey_path.append(ey) + + # Currently, if there are no analysis scripts defined, set None if analysisyaml is not None: ay_path=[] for a in analysisyaml: @@ -222,8 +223,8 @@ def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): for key in result: if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): - if key == "analysis": - result[key] = yf[key] | result[key] +# if key == "analysis": + result['analysis'] = yf['analysis'] | result['analysis'] # If only one analysis yaml listed, do nothing # (already combined in 'combine_analysis' function) elif analysis_list is not None and len(analysis_list) == 1: From 55766cb4bdca5f590e368de9fc4c23165ec8ab1c Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 20 Feb 2025 15:24:11 -0500 Subject: [PATCH 21/24] #346 Fix error messages --- fre/yamltools/combine_yamls_script.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/fre/yamltools/combine_yamls_script.py b/fre/yamltools/combine_yamls_script.py index 569531a2..5812e25b 100755 --- a/fre/yamltools/combine_yamls_script.py +++ b/fre/yamltools/combine_yamls_script.py @@ -35,19 +35,19 @@ def get_combined_compileyaml(comb,output=None): try: (yaml_content, loaded_yaml)=comb.combine_model() except: - raise ValueError("uh oh") + raise ValueError("ERR: Could not merge model information.") # Merge compile into combined file to create updated yaml_content/yaml try: (yaml_content, loaded_yaml) = comb.combine_compile(yaml_content, loaded_yaml) except: - raise ValueError("uh oh again") + raise ValueError("ERR: Could not merge compile yaml information.") # Merge platforms.yaml into combined file try: (yaml_content,loaded_yaml) = comb.combine_platforms(yaml_content, loaded_yaml) except: - raise ValueError("uh oh one more time") + raise ValueError("ERR: Could not merge platform yaml 
information.") # Clean the yaml cleaned_yaml = comb.clean_yaml(yaml_content) @@ -70,26 +70,26 @@ def get_combined_ppyaml(comb,experiment,output=None): # Merge model into combined file (yaml_content, loaded_yaml) = comb.combine_model() except: - raise ValueError("pp uh oh 1") + raise ValueError("ERR: Could not merge model information.") try: # Merge pp experiment yamls into combined file comb_pp_updated_list = comb.combine_experiment(yaml_content, loaded_yaml) except: - raise ValueError("pp uh oh 2") + raise ValueError("ERR: Could not merge pp experiment yaml information") try: # Merge analysis yamls, if defined, into combined file comb_analysis_updated_list = comb.combine_analysis(yaml_content, loaded_yaml) except: - raise ValueError("uh oh 3") + raise ValueError("ERR: Could not merge analysis yaml information") try: # Merge model/pp and model/analysis yamls if more than 1 is defined # (without overwriting the yaml) full_combined = comb.merge_multiple_yamls(comb_pp_updated_list, comb_analysis_updated_list,loaded_yaml) except: - raise ValueError("uh oh 4") + raise ValueError("ERR: Could not merge multiple pp and analysis information together.") # Clean the yaml cleaned_yaml = comb.clean_yaml(full_combined) From 66e89318ea8d82dc76b2d3e0ab52c4601b710c55 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Thu, 20 Feb 2025 15:48:40 -0500 Subject: [PATCH 22/24] #346 Try to get rid of oh so many nested if statements --- fre/yamltools/pp_info_parser.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index bade5428..a7802a8e 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -197,13 +197,16 @@ def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): pp_list_to_string_concat = "".join(i) yf = yaml.load(pp_list_to_string_concat,Loader=yaml.Loader) for key in result: + # Only concerned with merging component information in "postprocess" sections across yamls + if key != "postprocess": + continue if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): - if key == "postprocess": - if 'components' in result['postprocess']: - result['postprocess']["components"] += yf['postprocess']["components"] + result[key]["components"] - else: - result['postprocess']["components"] = yf['postprocess']["components"] + if 'components' in result['postprocess']: + result['postprocess']["components"] += yf['postprocess']["components"] + result[key]["components"] + else: + result['postprocess']["components"] = yf['postprocess']["components"] + # If only one post-processing yaml listed, do nothing # (already combined in 'combine_experiments' function) elif pp_list is not None and len(pp_list) == 1: From ab83cb544f3dfe36722827fa73f43f9919b243b2 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 24 Feb 2025 16:53:49 -0500 Subject: [PATCH 23/24] #346 Add sort_keys = False --- fre/yamltools/tests/test_combine_yamls_script.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/fre/yamltools/tests/test_combine_yamls_script.py b/fre/yamltools/tests/test_combine_yamls_script.py index efd5d8ab..02d76eab 100644 --- a/fre/yamltools/tests/test_combine_yamls_script.py +++ b/fre/yamltools/tests/test_combine_yamls_script.py @@ -286,16 +286,17 @@ def test_combine_pp_yamls(tmp_path): # create temp directory tmp_path.mkdir(exist_ok=True) - # write model and pp yamls + # create model and pp yamls file_model = open(tmp_path / 'model.yaml', 'w') file_pp1 = 
open(tmp_path / 'pp1.yaml', 'w') file_pp2 = open(tmp_path / 'pp2.yaml', 'w') file_pp3 = open(tmp_path / 'pp3.yaml', 'w') - yaml.dump(model, file_model) - yaml.dump(pp1, file_pp1) - yaml.dump(pp2, file_pp2) - yaml.dump(pp3, file_pp3) + # write to/ dump info into created model and pp yamls + yaml.dump(model, file_model, default_flow_style=False, sort_keys=False) + yaml.dump(pp1, file_pp1, default_flow_style=False, sort_keys=False) + yaml.dump(pp2, file_pp2, default_flow_style=False, sort_keys=False) + yaml.dump(pp3, file_pp3, default_flow_style=False, sort_keys=False) # combine the yamls # output is a combined dictionary of necessary yaml info @@ -303,6 +304,7 @@ def test_combine_pp_yamls(tmp_path): pp = pprint.PrettyPrinter(indent=4) pp.pprint(output) + # compare dictionaries assert output == combined ## TO-DO: From 4f391e3a30db3f6a66cf22ee155c5a78a08f4136 Mon Sep 17 00:00:00 2001 From: Dana Singh Date: Mon, 24 Feb 2025 17:23:16 -0500 Subject: [PATCH 24/24] #346 Remove repetitive addition to a list --- fre/yamltools/pp_info_parser.py | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/fre/yamltools/pp_info_parser.py b/fre/yamltools/pp_info_parser.py index a7802a8e..c87c2f2e 100644 --- a/fre/yamltools/pp_info_parser.py +++ b/fre/yamltools/pp_info_parser.py @@ -120,13 +120,7 @@ def combine_experiment(self, yaml_content, loaded_yaml): # If more than 1 pp yaml listed # (Must be done for aliases defined) elif ey_path is not None and len(ey_path) > 1: - with open(ey_path[0],'r') as eyp0: - exp_content = eyp0.read() #string - - exp_info = yaml_content + exp_content - pp_yamls.append([exp_info]) - - for i in ey_path[1:]: + for i in ey_path: with open(i,'r') as eyp: exp_content = eyp.read() @@ -158,13 +152,7 @@ def combine_analysis(self,yaml_content,loaded_yaml): # If more than 1 pp yaml listed # (Must be done for aliases defined) elif ay_path is not None and len(ay_path) > 1: - with open(ay_path[0],'r') as ayp0: - analysis_content = ayp0.read() - - analysis_info = yaml_content + analysis_content - analysis_yamls.append([analysis_info]) - - for i in ay_path[1:]: + for i in ay_path: with open(i,'r') as ayp: analysis_content = ayp.read() @@ -203,7 +191,7 @@ def merge_multiple_yamls(self, pp_list, analysis_list, loaded_yaml): if key in yf: if isinstance(result[key],dict) and isinstance(yf[key],dict): if 'components' in result['postprocess']: - result['postprocess']["components"] += yf['postprocess']["components"] + result[key]["components"] + result['postprocess']["components"] += yf['postprocess']["components"] else: result['postprocess']["components"] = yf['postprocess']["components"]
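The last hunk is worth spelling out: "+=" already extends the components list in place, so also adding result's own components on the right-hand side appended every existing entry a second time on each pass. A tiny sketch of the before/after behaviour with placeholder component dictionaries:

    # placeholder component entries, not real pp components
    existing = [{'type': 'atmos_cmip'}]
    incoming = [{'type': 'land'}]

    buggy = list(existing)
    buggy += incoming + buggy            # existing entries come back a second time
    fixed = list(existing)
    fixed += incoming                    # what the patch keeps

    print([c['type'] for c in buggy])    # ['atmos_cmip', 'land', 'atmos_cmip']
    print([c['type'] for c in fixed])    # ['atmos_cmip', 'land']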