From ddd23ad278120fda5e84ef216eb223883c613ac2 Mon Sep 17 00:00:00 2001
From: Rajendra Adhikari
Date: Thu, 21 Dec 2023 14:07:31 -0600
Subject: [PATCH 1/2] Upgrades analyzer bug fix and remember inputs

---
 buildstock_query/helpers.py                 | 27 ++++++++++
 buildstock_query/tools/upgrades_analyzer.py | 52 +++++++++----------
 .../upgrades_visualizer.py                  | 24 +++++----
 3 files changed, 66 insertions(+), 37 deletions(-)

diff --git a/buildstock_query/helpers.py b/buildstock_query/helpers.py
index 5e38dd4..8598f87 100644
--- a/buildstock_query/helpers.py
+++ b/buildstock_query/helpers.py
@@ -5,6 +5,8 @@
 import pickle
 import os
 import pandas as pd
+from pathlib import Path
+import json
 
 from typing import Literal, TYPE_CHECKING
 if TYPE_CHECKING:
@@ -151,3 +153,28 @@ def read_csv(csv_file_path, **kwargs) -> pd.DataFrame:
     default_na_values = pd._libs.parsers.STR_NA_VALUES
     df = pd.read_csv(csv_file_path, na_values=list(default_na_values - {"None"}), keep_default_na=False, **kwargs)
     return df
+
+
+def load_script_defaults(defaults_name):
+    """
+    Load the default input for script from cache
+    """
+    cache_folder = Path(".bsq_cache")
+    cache_folder.mkdir(exist_ok=True)
+    defaults_cache = cache_folder / f"{defaults_name}_defaults.json"
+    defaults = {}
+    if defaults_cache.exists():
+        with open(defaults_cache) as f:
+            defaults = json.load(f)
+    return defaults
+
+
+def save_script_defaults(defaults_name, defaults):
+    """
+    Save the current input for script to cache as the default for next run
+    """
+    cache_folder = Path(".bsq_cache")
+    cache_folder.mkdir(exist_ok=True)
+    defaults_cache = cache_folder / f"{defaults_name}_defaults.json"
+    with open(defaults_cache, "w") as f:
+        json.dump(defaults, f)
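
For context, the two helpers added to helpers.py above round-trip a small JSON file at
.bsq_cache/<defaults_name>_defaults.json in the current working directory. A minimal usage
sketch follows; the yaml_file value is illustrative, and only the two helper functions come
from this patch:

    from buildstock_query.helpers import load_script_defaults, save_script_defaults

    defaults = load_script_defaults("project_info")    # returns {} on the very first run
    defaults.update({"yaml_file": "project.yml"})      # illustrative value entered by a user
    save_script_defaults("project_info", defaults)
    # A later call to load_script_defaults("project_info") returns the saved dict,
    # which the CLI scripts changed below use to pre-fill their prompts.
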
diff --git a/buildstock_query/tools/upgrades_analyzer.py b/buildstock_query/tools/upgrades_analyzer.py
index 328268a..4d720bb 100644
--- a/buildstock_query/tools/upgrades_analyzer.py
+++ b/buildstock_query/tools/upgrades_analyzer.py
@@ -11,9 +11,10 @@
 from typing import Optional
 from collections import defaultdict
 from pathlib import Path
-from .logic_parser import LogicParser
+from buildstock_query.tools.logic_parser import LogicParser
 from tabulate import tabulate
-from buildstock_query.helpers import read_csv
+from buildstock_query.helpers import read_csv, load_script_defaults, save_script_defaults
+import json
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -353,7 +354,7 @@ def _normalize_lists(logic, parent=None):
         else:
             return logic
 
-    def _get_options_application_count_report(self, logic_dict) -> Optional[pd.DataFrame]:
+    def _get_options_application_count_report(self, logic_dict) -> pd.DataFrame:
         """
         For a given logic dictionary, this method will return a report df of options application.
         Example report below:
@@ -369,10 +370,6 @@ def _get_options_application_count_report(self, logic_dict) -> Optional[pd.DataF
              5  1, 6, 8, 13, 14      42 (0.0%)      296 (0.3%)      2959 (3.0%)
 
         """
-        n_options = len(logic_dict)
-        if n_options < 2:
-            return None
-
         logic_df = pd.DataFrame(logic_dict)
         nbldgs = len(logic_df)
         opts2count = logic_df.apply(lambda row: tuple(indx+1 for indx, val in enumerate(row) if val),
@@ -396,10 +393,7 @@ def _get_options_application_count_report(self, logic_dict) -> Optional[pd.DataF
             application_report_rows.append(record)
         assert cum_count_all <= nbldgs, "Cumulative count of options applied is more than total number of buildings."
 
-        if application_report_rows:
-            application_report_df = pd.DataFrame(application_report_rows).set_index("Number of options")
-            return application_report_df
-        return None
+        return pd.DataFrame(application_report_rows).set_index("Number of options")
 
     def _get_left_out_report_all(self, upgrade_num):
         cfg = self.get_cfg()
@@ -564,20 +558,18 @@ def _get_detailed_report_all(self, upgrade_num, normalize_logic: bool = False):
         report_str += f"Any of the options (or-ing) were applied to: {or_count} ({self._to_pct(or_count)}%)" + "\n"
 
         option_app_report = self._get_options_application_count_report(grouped_conds_dict)
-        if option_app_report is not None:
-            report_str += "-" * 80 + "\n"
-            report_str += f"Report of how the {len(grouped_conds_dict)} options were applied to the buildings." + "\n"
-            report_str += tabulate(option_app_report, headers='keys', tablefmt='grid', maxcolwidths=50) + "\n"
+        report_str += "-" * 80 + "\n"
+        report_str += f"Report of how the {len(grouped_conds_dict)} options were applied to the buildings." + "\n"
+        report_str += tabulate(option_app_report, headers='keys', tablefmt='grid', maxcolwidths=50) + "\n"
 
         detailed_app_report_df = self._get_options_application_count_report(conds_dict)
-        if detailed_app_report_df is not None:
-            report_str += "-" * 80 + "\n"
-            if len(detailed_app_report_df) > 100:
-                report_str += "Detailed report is skipped because of too many rows. " + "\n"
-                report_str += "Ask the developer if this is useful to see" + "\n"
-            else:
-                report_str += f"Detailed report of how the {n_options} options were applied to the buildings." + "\n"
-                report_str += tabulate(option_app_report, headers='keys', tablefmt='grid', maxcolwidths=50) + "\n"
+        report_str += "-" * 80 + "\n"
+        if len(detailed_app_report_df) > 100:
+            report_str += "Detailed report is skipped because of too many rows. " + "\n"
+            report_str += "Ask the developer if this is useful to see" + "\n"
+        else:
+            report_str += f"Detailed report of how the {n_options} options were applied to the buildings." + "\n"
+            report_str += tabulate(detailed_app_report_df, headers='keys', tablefmt='grid', maxcolwidths=50) + "\n"
         return or_array, report_str
 
     def _to_pct(self, count, total=None):
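
The hunks above make _get_options_application_count_report always return a DataFrame instead
of Optional[pd.DataFrame] (previously it returned None when fewer than two options were
present or when no report rows accumulated), so _get_detailed_report_all drops its None
checks; they also fix the detailed report to tabulate detailed_app_report_df instead of
option_app_report. A self-contained sketch of the counting idea with made-up data (the
column names are illustrative, not the exact report schema):

    import pandas as pd

    # One boolean column per option, one row per building: True means the option applied.
    logic_df = pd.DataFrame({
        "opt1": [True, True, False, True],
        "opt2": [False, True, False, True],
    })
    # Tuple of 1-based option indices applied to each building, counted per combination.
    opts2count = logic_df.apply(
        lambda row: tuple(indx + 1 for indx, val in enumerate(row) if val), axis=1
    ).value_counts().to_dict()
    report_df = pd.DataFrame(
        [{"Number of options": len(opts), "Option combination": opts, "Applied buildings": count}
         for opts, count in opts2count.items() if opts]
    ).set_index("Number of options")
    print(report_df)
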
@@ -647,24 +639,30 @@ def save_detailed_report_all(self, file_path: str, logic_transform=None):
 
 
 def main():
+    defaults = load_script_defaults("project_info")
     yaml_file = inquirer.filepath(
-        message="Project configuration file (EUSS-project-file.yml):",
+        message="Project configuration file (the yaml file):",
+        default=defaults.get("yaml_file", ""),
         validate=PathValidator(),
-        filter=lambda x: x or "EUSS-project-file.yml",
     ).execute()
     buildstock_file = inquirer.filepath(
         message="Project sample file (buildstock.csv):",
+        default=defaults.get("buildstock_file", ""),
         validate=PathValidator(),
-        filter=lambda x: x or "buildstock.csv",
     ).execute()
     opt_sat_file = inquirer.filepath(
         message="Path to option_saturation.csv file",
+        default=defaults.get("opt_sat_file", ""),
         validate=PathValidator()
     ).execute()
     output_prefix = inquirer.text(
         message="output file name prefix:",
-        filter=lambda x: "" if x is None else f"{x}_",
+        default=defaults.get("output_prefix", ""),
+        filter=lambda x: "" if x is None else f"{x}",
     ).execute()
+    defaults.update({"yaml_file": yaml_file, "buildstock_file": buildstock_file, "opt_sat_file": opt_sat_file,
+                     "output_prefix": output_prefix})
+    save_script_defaults("project_info", defaults)
     ua = UpgradesAnalyzer(yaml_file, buildstock_file, opt_sat_file)
     report_df = ua.get_report()
     folder_path = Path.cwd()
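
The main() changes above are the "remember inputs" part of the commit: each InquirerPy prompt
is seeded with default=defaults.get(...) from the cache, and the answers are written back with
save_script_defaults. A condensed sketch of that round-trip, assuming InquirerPy is installed
(only the yaml_file prompt is shown; error handling is omitted):

    from InquirerPy import inquirer
    from buildstock_query.helpers import load_script_defaults, save_script_defaults

    defaults = load_script_defaults("project_info")
    yaml_file = inquirer.filepath(
        message="Project configuration file (the yaml file):",
        default=defaults.get("yaml_file", ""),   # pre-filled from the previous run
    ).execute()
    defaults.update({"yaml_file": yaml_file})
    save_script_defaults("project_info", defaults)
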
diff --git a/buildstock_query/tools/upgrades_visualizer/upgrades_visualizer.py b/buildstock_query/tools/upgrades_visualizer/upgrades_visualizer.py
index 97a6305..f3ff5ca 100644
--- a/buildstock_query/tools/upgrades_visualizer/upgrades_visualizer.py
+++ b/buildstock_query/tools/upgrades_visualizer/upgrades_visualizer.py
@@ -20,6 +20,7 @@
 from buildstock_query.tools.upgrades_visualizer.viz_data import VizData
 from buildstock_query.tools.upgrades_visualizer.plot_utils import PlotParams, ValueTypes, SavingsTypes
 from buildstock_query.tools.upgrades_visualizer.figure import UpgradesPlot
+from buildstock_query.helpers import load_script_defaults, save_script_defaults
 import polars as pl
 
 # os.chdir("/Users/radhikar/Documents/eulpda/EULP-data-analysis/eulpda/smart_query/")
@@ -811,25 +812,28 @@ def update_figure(view_tab, grp_by, fuel, enduse, graph_type, savings_type, chng
 
 def main():
     print("Welcome to Upgrades Visualizer.")
-    yaml_path = inquirer.text(message="Please enter path to the buildstock configuration yml file: ",
-                              default="").execute()
-    opt_sat_path = inquirer.text(message="Please enter path to the options saturation csv file: ",
-                                 default="").execute()
+    defaults = load_script_defaults("project_info")
+    yaml_file = inquirer.text(message="Please enter path to the buildstock configuration yml file: ",
+                              default=defaults.get("yaml_file", "")).execute()
+    opt_sat_file = inquirer.text(message="Please enter path to the options saturation csv file: ",
+                                 default=defaults.get("opt_sat_file", "")).execute()
     workgroup = inquirer.text(message="Please Athena workgroup name: ",
-                              default="rescore").execute()
+                              default=defaults.get("workgroup", "")).execute()
     db_name = inquirer.text(message="Please enter database_name "
                             "(found in postprocessing:aws:athena in the buildstock configuration file): ",
-                            default='').execute()
+                            default=defaults.get("db_name", "")).execute()
     table_name = inquirer.text(message="Please enter table name (same as output folder name; found under "
                                "output_directory in the buildstock configuration file). [Enter two names "
                                "separated by comma if baseline and upgrades are in different run] :",
-                               default=""
+                               default=defaults.get("table_name", "")
                                ).execute()
-
+    defaults.update({"yaml_file": yaml_file, "opt_sat_file": opt_sat_file, "workgroup": workgroup,
+                     "db_name": db_name, "table_name": table_name})
+    save_script_defaults("project_info", defaults)
     if ',' in table_name:
         table_name = table_name.split(',')
-    app = _get_app(yaml_path=yaml_path,
-                   opt_sat_path=opt_sat_path,
+    app = _get_app(yaml_path=yaml_file,
+                   opt_sat_path=opt_sat_file,
                    workgroup=workgroup,
                    db_name=db_name,
                    table_name=table_name)

From 163dba68f37720ceb9baf371b8186cf5cf557d3a Mon Sep 17 00:00:00 2001
From: Rajendra Adhikari
Date: Thu, 21 Dec 2023 14:08:24 -0600
Subject: [PATCH 2/2] Remove unused import

---
 buildstock_query/tools/upgrades_analyzer.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/buildstock_query/tools/upgrades_analyzer.py b/buildstock_query/tools/upgrades_analyzer.py
index 4d720bb..eae2437 100644
--- a/buildstock_query/tools/upgrades_analyzer.py
+++ b/buildstock_query/tools/upgrades_analyzer.py
@@ -14,7 +14,6 @@
 from buildstock_query.tools.logic_parser import LogicParser
 from tabulate import tabulate
 from buildstock_query.helpers import read_csv, load_script_defaults, save_script_defaults
-import json
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
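
One behavior worth noting across both patches: the analyzer and the visualizer share the same
defaults_name ("project_info"), so the yaml_file and opt_sat_file answers given to the
upgrades analyzer pre-fill the visualizer's prompts on a later run. Also, as the visualizer's
table-name prompt explains, a comma-separated answer is split into a list before being passed
to _get_app; a small illustration with made-up run names:

    table_name = "baseline_run,upgrades_run"   # illustrative answer to the table-name prompt
    if ',' in table_name:
        table_name = table_name.split(',')
    print(table_name)                          # ['baseline_run', 'upgrades_run']
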