diff --git a/apps/pv_opt/pv_opt.py b/apps/pv_opt/pv_opt.py index ce456b2..234c8bf 100644 --- a/apps/pv_opt/pv_opt.py +++ b/apps/pv_opt/pv_opt.py @@ -2,6 +2,7 @@ import re import time from json import dumps + import appdaemon.adbase as ad import appdaemon.plugins.hass.hassapi as hass import appdaemon.plugins.mqtt.mqttapi as mqtt @@ -334,7 +335,9 @@ }, "solar_forecast": { "default": "Solcast", - "attributes": {"options": ["Solcast", "Solcast_p10", "Solcast_p90", "Weighted"]}, + "attributes": { + "options": ["Solcast", "Solcast_p10", "Solcast_p90", "Weighted"] + }, "domain": "select", }, "id_solcast_today": {"default": "sensor.solcast_pv_forecast_forecast_today"}, @@ -495,7 +498,9 @@ def initialize(self): retry_count = 0 while (not self.inverter.is_online()) and (retry_count < ONLINE_RETRIES): - self.log("Inverter controller appears not to be running. Waiting 5 secomds to re-try") + self.log( + "Inverter controller appears not to be running. Waiting 5 secomds to re-try" + ) time.sleep(5) retry_count += 1 @@ -530,7 +535,10 @@ def initialize(self): # self._estimate_capacity() self._load_pv_system_model() self._load_contract() - self.ev = self.get_config("ev_charger") in DEFAULT_CONFIG["ev_charger"]["attributes"]["options"][1:] + self.ev = ( + self.get_config("ev_charger") + in DEFAULT_CONFIG["ev_charger"]["attributes"]["options"][1:] + ) self._check_for_zappi() if self.get_config("alt_tariffs") is not None: @@ -559,7 +567,9 @@ def initialize(self): if self.debug: self.log(f"PV Opt Initialisation complete. Listen_state Handles:") for id in self.handles: - self.log(f" {id} {self.handles[id]} {self.info_listen_state(self.handles[id])}") + self.log( + f" {id} {self.handles[id]} {self.info_listen_state(self.handles[id])}" + ) @ad.app_lock def _run_test(self): @@ -622,7 +632,9 @@ def _get_io(self): self.rlog(f" {k:20s} {self.io_dispatch_attrib[k]}") for k in [x for x in self.io_dispatch_attrib.keys() if "dispatches" in x]: - self.log(f" {k:20s} {'Start':20s} {'End':20s} {'Charge':12s} {'Source':12s}") + self.log( + f" {k:20s} {'Start':20s} {'End':20s} {'Charge':12s} {'Source':12s}" + ) self.log(f" {'-'*20} {'-'*20} {'-'*20} {'-'*12} {'-'*12} ") for z in self.io_dispatch_attrib[k]: self.log( @@ -633,7 +645,13 @@ def _get_io(self): def _check_for_zappi(self): self.ulog("Checking for Zappi Sensors") sensor_entities = self.get_state("sensor") - self.zappi_entities = [k for k in sensor_entities if "zappi" in k for x in ["charge_added_session"] if x in k] + self.zappi_entities = [ + k + for k in sensor_entities + if "zappi" in k + for x in ["charge_added_session"] + if x in k + ] if len(self.zappi_entities) > 0: for entity_id in self.zappi_entities: zappi_sn = entity_id.split("_")[2] @@ -646,7 +664,9 @@ def _check_for_zappi(self): def _get_zappi(self, start, end, log=False): df = pd.DataFrame() for entity_id in self.zappi_entities: - df = self._get_hass_power_from_daily_kwh(entity_id, start=start, end=end, log=log) + df = self._get_hass_power_from_daily_kwh( + entity_id, start=start, end=end, log=log + ) if log: self.rlog(f">>> Zappi entity {entity_id}") self.log(f">>>\n{df.to_string()}") @@ -703,8 +723,12 @@ def _load_inverter(self): if self.inverter_type in INVERTER_TYPES: inverter_brand = self.inverter_type.split("_")[0].lower() InverterController = importName(f"{inverter_brand}", "InverterController") - self.log(f"Inverter type: {self.inverter_type}: inverter module: {inverter_brand}.py") - self.inverter = InverterController(inverter_type=self.inverter_type, host=self) + self.log( + f"Inverter type: 
{self.inverter_type}: inverter module: {inverter_brand}.py" + ) + self.inverter = InverterController( + inverter_type=self.inverter_type, host=self + ) self.log(f" Device name: {self.device_name}") self.rlog(f" Serial number: {self.inverter_sn}") @@ -726,10 +750,14 @@ def _load_pv_system_model(self): self.battery_model = pv.BatteryModel( capacity=self.get_config("battery_capacity_wh"), max_dod=self.get_config("maximum_dod_percent", 15) / 100, - current_limit_amps=self.get_config("battery_current_limit_amps", default=100), + current_limit_amps=self.get_config( + "battery_current_limit_amps", default=100 + ), voltage=self.get_config("battery_voltage", default=50), ) - self.pv_system = pv.PVsystemModel("PV_Opt", self.inverter_model, self.battery_model, host=self) + self.pv_system = pv.PVsystemModel( + "PV_Opt", self.inverter_model, self.battery_model, host=self + ) # def _setup_agile_schedule(self): # start = (pd.Timestamp.now(tz="UTC") + pd.Timedelta(1, "minutes")).to_pydatetime() @@ -740,7 +768,9 @@ def _load_pv_system_model(self): # ) def _setup_compare_schedule(self): - start = (pd.Timestamp.now(tz="UTC").ceil("60min") - pd.Timedelta("2min")).to_pydatetime() + start = ( + pd.Timestamp.now(tz="UTC").ceil("60min") - pd.Timedelta("2min") + ).to_pydatetime() self.timer_handle = self.run_every( self._compare_tariff_cb, start=start, @@ -772,7 +802,9 @@ def _cost_actual(self, **kwargs): grid = grid.set_axis(cols, axis=1).fillna(0) grid["grid_export"] *= -1 - cost_today = self.contract.net_cost(grid_flow=grid, log=self.debug, day_ahead=False) + cost_today = self.contract.net_cost( + grid_flow=grid, log=self.debug, day_ahead=False + ) return cost_today @@ -785,13 +817,23 @@ def get_config(self, item, default=None): return self._value_from_state(self.config_state[item]) if item in self.config: - if isinstance(self.config[item], str) and self.entity_exists(self.config[item]): + if isinstance(self.config[item], str) and self.entity_exists( + self.config[item] + ): x = self.get_ha_value(entity_id=self.config[item]) return x elif isinstance(self.config[item], list): if min([isinstance(x, str)] for x in self.config[item])[0]: - if min([self.entity_exists(entity_id=entity_id) for entity_id in self.config[item]]): - l = [self.get_ha_value(entity_id=entity_id) for entity_id in self.config[item]] + if min( + [ + self.entity_exists(entity_id=entity_id) + for entity_id in self.config[item] + ] + ): + l = [ + self.get_ha_value(entity_id=entity_id) + for entity_id in self.config[item] + ] try: return sum(l) except: @@ -804,7 +846,11 @@ def get_config(self, item, default=None): return default def _setup_schedule(self): - start_opt = pd.Timestamp.now().ceil(f"{self.get_config('optimise_frequency_minutes')}min").to_pydatetime() + start_opt = ( + pd.Timestamp.now() + .ceil(f"{self.get_config('optimise_frequency_minutes')}min") + .to_pydatetime() + ) self.timer_handle = self.run_every( self.optimise_time, start=start_opt, @@ -836,21 +882,30 @@ def _load_contract(self): octopus_entities = [ name - for name in self.get_state_retry(BOTTLECAP_DAVE["domain"]).keys() - if ("octopus_energy_electricity" in name and BOTTLECAP_DAVE["rates"] in name) + for name in self.get_state_retry( + BOTTLECAP_DAVE["domain"] + ).keys() + if ( + "octopus_energy_electricity" in name + and BOTTLECAP_DAVE["rates"] in name + ) ] entities = {} - entities["import"] = [x for x in octopus_entities if not "export" in x] + entities["import"] = [ + x for x in octopus_entities if not "export" in x + ] entities["export"] = [x for x in octopus_entities 
if "export" in x] for imp_exp in IMPEXP: for entity in entities[imp_exp]: - tariff_code = self.get_state_retry(entity, attribute="all")["attributes"].get( - BOTTLECAP_DAVE["tariff_code"], None - ) + tariff_code = self.get_state_retry(entity, attribute="all")[ + "attributes" + ].get(BOTTLECAP_DAVE["tariff_code"], None) - self.rlog(f" Found {imp_exp} entity {entity}: Tariff code: {tariff_code}") + self.rlog( + f" Found {imp_exp} entity {entity}: Tariff code: {tariff_code}" + ) tariffs = {x: None for x in IMPEXP} for imp_exp in IMPEXP: @@ -858,9 +913,9 @@ def _load_contract(self): self.log(f">>>{imp_exp}: {entities[imp_exp]}") if len(entities[imp_exp]) > 0: for entity in entities[imp_exp]: - tariff_code = self.get_state_retry(entity, attribute="all")["attributes"].get( - BOTTLECAP_DAVE["tariff_code"], None - ) + tariff_code = self.get_state_retry( + entity, attribute="all" + )["attributes"].get(BOTTLECAP_DAVE["tariff_code"], None) if self.debug: self.log(f">>>_load_contract {tariff_code}") @@ -874,7 +929,9 @@ def _load_contract(self): if "AGILE" in tariff_code: self.agile = True # Tariff is Octopus Agile if "INTELLI" in tariff_code: - self.intelligent = True # Tariff is Octopus Intelligent + self.intelligent = ( + True # Tariff is Octopus Intelligent + ) self.contract = pv.Contract( "current", @@ -896,8 +953,12 @@ def _load_contract(self): self.contract = None if self.contract is None: - if ("octopus_account" in self.config) and ("octopus_api_key" in self.config): - if (self.config["octopus_account"] is not None) and (self.config["octopus_api_key"] is not None): + if ("octopus_account" in self.config) and ( + "octopus_api_key" in self.config + ): + if (self.config["octopus_account"] is not None) and ( + self.config["octopus_api_key"] is not None + ): for x in ["octopus_account", "octopus_api_key"]: if self.config[x] not in self.redact_regex: self.redact_regex.append(x) @@ -936,7 +997,9 @@ def _load_contract(self): "octopus_import_tariff_code" in self.config and self.config["octopus_import_tariff_code"] is not None ): - self.rlog(f"Trying to load tariff codes: Import: {self.config['octopus_import_tariff_code']}") + self.rlog( + f"Trying to load tariff codes: Import: {self.config['octopus_import_tariff_code']}" + ) # try: # First load the import as we always need that tariffs["import"] = pv.Tariff( @@ -949,7 +1012,9 @@ def _load_contract(self): if tariffs["import"] is not None: if "octopus_export_tariff_code" in self.config: - self.rlog(f"Trying to load tariff codes: Export: {self.config['octopus_export_tariff_code']}") + self.rlog( + f"Trying to load tariff codes: Export: {self.config['octopus_export_tariff_code']}" + ) tariffs["export"] = pv.Tariff( self.config[f"octopus_export_tariff_code"], export=False, @@ -964,13 +1029,17 @@ def _load_contract(self): exp=tariffs["export"], host=self, ) - self.rlog("Contract tariffs loaded OK from Tariff Codes / Manual Spec") + self.rlog( + "Contract tariffs loaded OK from Tariff Codes / Manual Spec" + ) # except Exception as e: # self.rlog(f"Unable to load Tariff Codes {e}", level="ERROR") if self.contract is None: i += 1 - self.rlog(f"Failed to load contact - Attempt {i} of {n}. Waiting 2 minutes to re-try") + self.rlog( + f"Failed to load contact - Attempt {i} of {n}. 
Waiting 2 minutes to re-try" + ) time.sleep(12) if self.contract is None: @@ -985,7 +1054,9 @@ def _load_contract(self): else: self.contract_last_loaded = pd.Timestamp.now(tz="UTC") if self.contract.tariffs["export"] is None: - self.contract.tariffs["export"] = pv.Tariff("None", export=True, unit=0, octopus=False, host=self) + self.contract.tariffs["export"] = pv.Tariff( + "None", export=True, unit=0, octopus=False, host=self + ) self.rlog("") self._load_saving_events() @@ -1055,24 +1126,34 @@ def _check_tariffs(self): def _load_saving_events(self): if ( - len([name for name in self.get_state_retry("event").keys() if ("octoplus_saving_session_events" in name)]) + len( + [ + name + for name in self.get_state_retry("event").keys() + if ("octoplus_saving_session_events" in name) + ] + ) > 0 ): saving_events_entity = [ - name for name in self.get_state_retry("event").keys() if ("octoplus_saving_session_events" in name) + name + for name in self.get_state_retry("event").keys() + if ("octoplus_saving_session_events" in name) ][0] self.log("") self.rlog(f"Found Octopus Savings Events entity: {saving_events_entity}") - octopus_account = self.get_state_retry(entity_id=saving_events_entity, attribute="account_id") + octopus_account = self.get_state_retry( + entity_id=saving_events_entity, attribute="account_id" + ) self.config["octopus_account"] = octopus_account if octopus_account not in self.redact_regex: self.redact_regex.append(octopus_account) self.redact_regex.append(octopus_account.lower().replace("-", "_")) - available_events = self.get_state_retry(saving_events_entity, attribute="all")["attributes"][ - "available_events" - ] + available_events = self.get_state_retry( + saving_events_entity, attribute="all" + )["attributes"]["available_events"] if len(available_events) > 0: self.log("Joining the following new Octoplus Events:") @@ -1088,12 +1169,14 @@ def _load_saving_events(self): event_code=event["code"], ) - joined_events = self.get_state_retry(saving_events_entity, attribute="all")["attributes"]["joined_events"] + joined_events = self.get_state_retry(saving_events_entity, attribute="all")[ + "attributes" + ]["joined_events"] for event in joined_events: - if event["id"] not in self.saving_events and pd.Timestamp(event["end"], tz="UTC") > pd.Timestamp.now( - tz="UTC" - ): + if event["id"] not in self.saving_events and pd.Timestamp( + event["end"], tz="UTC" + ) > pd.Timestamp.now(tz="UTC"): self.saving_events[event["id"]] = event self.log("") @@ -1115,7 +1198,9 @@ def get_ha_value(self, entity_id): # if the state is None return None if state is not None: - if (state in ["unknown", "unavailable"]) and (entity_id[:6] != "button"): + if (state in ["unknown", "unavailable"]) and ( + entity_id[:6] != "button" + ): e = f"HA returned {state} for state of {entity_id}" self.status(f"ERROR: {e}") self.log(e, level="ERROR") @@ -1149,7 +1234,9 @@ def get_default_config(self, item): def same_type(self, a, b): if type(a) != type(b): - (isinstance(a, int) | isinstance(a, float)) & (isinstance(b, int) | isinstance(b, float)) + (isinstance(a, int) | isinstance(a, float)) & ( + isinstance(b, int) | isinstance(b, float) + ) else: return True @@ -1181,7 +1268,12 @@ def _load_args(self, items=None): self.args[item] = [self.args[item]] values = [ - (v.replace("{device_name}", self.device_name) if isinstance(v, str) else v) for v in self.args[item] + ( + v.replace("{device_name}", self.device_name) + if isinstance(v, str) + else v + ) + for v in self.args[item] ] if values[0] is None: @@ -1202,8 +1294,12 @@ def 
_load_args(self, items=None): str1 = "" str2 = " " - self.rlog(f" {str1:34s} {str2} {x['name']:27s} Import: {x['octopus_import_tariff_code']:>36s}") - self.rlog(f" {'':34s} {'':27s} Export: {x['octopus_export_tariff_code']:>36s}") + self.rlog( + f" {str1:34s} {str2} {x['name']:27s} Import: {x['octopus_import_tariff_code']:>36s}" + ) + self.rlog( + f" {'':34s} {'':27s} Export: {x['octopus_export_tariff_code']:>36s}" + ) self.yaml_config[item] = self.config[item] elif item == "consumption_shape": @@ -1219,7 +1315,9 @@ def _load_args(self, items=None): str2 = " " str3 = " " str4 = " " - self.rlog(f" {str1:34s} {str2} {str3} {x['hour']:5.2f} {str4} {x['consumption']:5.0f} W") + self.rlog( + f" {str1:34s} {str2} {str3} {x['hour']:5.2f} {str4} {x['consumption']:5.0f} W" + ) self.yaml_config[item] = self.config[item] elif re.match("^manual_..port_tariff_unit$", item): @@ -1242,7 +1340,9 @@ def _load_args(self, items=None): elif "id_" in item: if self.debug: - self.log(f">>> Test: {self.entity_exists('update.home_assistant_core_update')}") + self.log( + f">>> Test: {self.entity_exists('update.home_assistant_core_update')}" + ) for v in values: self.log(f">>> {item} {v} {self.entity_exists(v)}") if min([self.entity_exists(v) for v in values]): @@ -1274,12 +1374,18 @@ def _load_args(self, items=None): for value in self.args[item]: self.rlog(f"\t{value}") - arg_types = {t: [isinstance(v, t) for v in values] for t in [str, float, int, bool]} + arg_types = { + t: [isinstance(v, t) for v in values] + for t in [str, float, int, bool] + } if ( len(values) == 1 and isinstance(values[0], str) - and (pd.to_datetime(values[0], errors="coerce", format="%H:%M") != pd.NaT) + and ( + pd.to_datetime(values[0], errors="coerce", format="%H:%M") + != pd.NaT + ) ): self.config[item] = values[0] self.rlog( @@ -1291,7 +1397,10 @@ def _load_args(self, items=None): if self.debug: self.rlog("\tFound a valid list of strings") - if isinstance(self.get_default_config(item), str) and len(values) == 1: + if ( + isinstance(self.get_default_config(item), str) + and len(values) == 1 + ): self.config[item] = values[0] self.rlog( f" {item:34s} = {str(self.config[item]):57s} {str(self.get_config(item)):>6s}: value in YAML" @@ -1301,13 +1410,16 @@ def _load_args(self, items=None): else: ha_values = [self.get_ha_value(entity_id=v) for v in values] val_types = { - t: np.array([isinstance(v, t) for v in ha_values]) for t in [str, float, int, bool] + t: np.array([isinstance(v, t) for v in ha_values]) + for t in [str, float, int, bool] } # if they are all float or int valid_strings = [ j - for j in [h for h in zip(ha_values[:-1], values[:-1]) if h[0]] + for j in [ + h for h in zip(ha_values[:-1], values[:-1]) if h[0] + ] if j[0] in DEFAULT_CONFIG[item]["options"] ] @@ -1327,7 +1439,9 @@ def _load_args(self, items=None): ) elif len(values) > 1: - if self.same_type(values[-1], self.get_default_config(item)): + if self.same_type( + values[-1], self.get_default_config(item) + ): self.config[item] = values[-1] self.rlog( f" {item:34s} = {str(self.config[item]):57s} {str(self.get_config(item)):>6s}: YAML default. Unable to read from HA entities listed in YAML." @@ -1347,7 +1461,10 @@ def _load_args(self, items=None): f" {item:34s} = {str(self.config[item]):57s} {str(self.get_config(item)):>6s}: system default. Unable to read from HA entities listed in YAML. 
No default in YAML.", level="WARNING", ) - elif item in self.inverter.config or item in self.inverter.brand_config: + elif ( + item in self.inverter.config + or item in self.inverter.brand_config + ): self.config[item] = self.get_default_config(item) self.rlog( @@ -1360,7 +1477,9 @@ def _load_args(self, items=None): f" {item:34s} = {str(self.config[item]):57s} {str(self.get_config(item)):>6s}: YAML default value. No default defined." ) - elif len(values) == 1 and (arg_types[bool][0] or arg_types[int][0] or arg_types[float][0]): + elif len(values) == 1 and ( + arg_types[bool][0] or arg_types[int][0] or arg_types[float][0] + ): if self.debug: self.rlog("\tFound a single default value") @@ -1373,12 +1492,21 @@ def _load_args(self, items=None): elif ( len(values) > 1 and (min(arg_types[str][:-1])) - and (arg_types[bool][-1] or arg_types[int][-1] or arg_types[float][-1]) + and ( + arg_types[bool][-1] + or arg_types[int][-1] + or arg_types[float][-1] + ) ): if self.debug: - self.rlog("\tFound a valid list of strings followed by a single default value") + self.rlog( + "\tFound a valid list of strings followed by a single default value" + ) ha_values = [self.get_ha_value(entity_id=v) for v in values[:-1]] - val_types = {t: np.array([isinstance(v, t) for v in ha_values]) for t in [str, float, int, bool]} + val_types = { + t: np.array([isinstance(v, t) for v in ha_values]) + for t in [str, float, int, bool] + } # if they are all float or int if np.min(val_types[int] | val_types[float]): self.config[item] = sum(ha_values) @@ -1470,7 +1598,9 @@ def _expose_configs(self, over_write=True): state_topic = f"homeassistant/{domain}/{id}/state" if not self.entity_exists(entity_id=entity_id): - self.log(f" - Creating HA Entity {entity_id} for {item} using MQTT Discovery") + self.log( + f" - Creating HA Entity {entity_id} for {item} using MQTT Discovery" + ) conf = ( { "state_topic": state_topic, @@ -1498,13 +1628,18 @@ def _expose_configs(self, over_write=True): elif ( isinstance(self.get_ha_value(entity_id=entity_id), str) - and (self.get_ha_value(entity_id=entity_id) not in attributes.get("options", {})) + and ( + self.get_ha_value(entity_id=entity_id) + not in attributes.get("options", {}) + ) and (domain not in ["text", "button"]) ) or (self.get_ha_value(entity_id=entity_id) is None): state = self._state_from_value(self.get_default_config(item)) - self.log(f" - Found unexpected str for {entity_id} reverting to default of {state}") + self.log( + f" - Found unexpected str for {entity_id} reverting to default of {state}" + ) self.set_state(state=state, entity_id=entity_id) @@ -1517,13 +1652,21 @@ def _expose_configs(self, over_write=True): self.log("Over-writing HA from YAML:") self.log("--------------------------") self.log("") - self.log(f" {'Config Item':40s} {'HA Entity':42s} Old State New State") - self.log(f" {'-----------':40s} {'---------':42s} ---------- ----------") + self.log( + f" {'Config Item':40s} {'HA Entity':42s} Old State New State" + ) + self.log( + f" {'-----------':40s} {'---------':42s} ---------- ----------" + ) over_write_log = True - str_log = f" {item:40s} {entity_id:42s} {state:10s} {new_state:10s}" + str_log = ( + f" {item:40s} {entity_id:42s} {state:10s} {new_state:10s}" + ) over_write_count = 0 - while (state != new_state) and (over_write_count < OVERWRITE_ATTEMPTS): + while (state != new_state) and ( + over_write_count < OVERWRITE_ATTEMPTS + ): self.set_state(state=new_state, entity_id=entity_id) time.sleep(0.1) state = self.get_state_retry(entity_id) @@ -1554,7 +1697,9 @@ def 
_expose_configs(self, over_write=True): for entity_id in self.change_items: if not "sensor" in entity_id: item = self.change_items[entity_id] - self.log(f" {item:40s} {entity_id:42s} {self.config_state[item]}") + self.log( + f" {item:40s} {entity_id:42s} {self.config_state[item]}" + ) self.handles[entity_id] = self.listen_state( callback=self.optimise_state_change, entity_id=entity_id ) @@ -1566,7 +1711,9 @@ def _expose_configs(self, over_write=True): def status(self, status): entity_id = f"sensor.{self.prefix.lower()}_status" - attributes = {"last_updated": pd.Timestamp.now().strftime(DATE_TIME_FORMAT_LONG)} + attributes = { + "last_updated": pd.Timestamp.now().strftime(DATE_TIME_FORMAT_LONG) + } # self.log(f">>> {status}") # self.log(f">>> {entity_id}") self.set_state(state=status, entity_id=entity_id, attributes=attributes) @@ -1574,7 +1721,9 @@ def status(self, status): @ad.app_lock def optimise_state_change(self, entity_id, attribute, old, new, kwargs): item = self.change_items[entity_id] - self.log(f"State change detected for {entity_id} [config item: {item}] from {old} to {new}:") + self.log( + f"State change detected for {entity_id} [config item: {item}] from {old} to {new}:" + ) self.config_state[item] = new @@ -1643,7 +1792,9 @@ def optimise(self): if self.io: self._get_io() - if self.get_config("forced_discharge") and (self.get_config("supports_forced_discharge", True)): + if self.get_config("forced_discharge") and ( + self.get_config("supports_forced_discharge", True) + ): discharge_enable = "enabled" else: discharge_enable = "disabled" @@ -1654,19 +1805,23 @@ def optimise(self): self.ulog("Checking tariffs:") - self.log(f" Contract last loaded at {self.contract_last_loaded.strftime(DATE_TIME_FORMAT_SHORT)}") + self.log( + f" Contract last loaded at {self.contract_last_loaded.strftime(DATE_TIME_FORMAT_SHORT)}" + ) if self.agile: - if (self.contract.tariffs["import"].end().day == pd.Timestamp.now().day) and ( - pd.Timestamp.now(tz=self.tz).hour >= 16 - ): + if ( + self.contract.tariffs["import"].end().day == pd.Timestamp.now().day + ) and (pd.Timestamp.now(tz=self.tz).hour >= 16): self.log( f"Contract end day: {self.contract.tariffs['import'].end().day} Today:{pd.Timestamp.now().day}" ) self._load_contract() # If intelligent tariff, load at 4.40pm (rather than 4pm to cut down number of reloads) elif self.intelligent: - if (pd.Timestamp.now(tz=self.tz).hour == 16) and (pd.Timestamp.now(tz=self.tz).minute >= 40): + if (pd.Timestamp.now(tz=self.tz).hour == 16) and ( + pd.Timestamp.now(tz=self.tz).minute >= 40 + ): self.log(" About to reload Octopus Intelligent Tariff") self._load_contract() @@ -1723,7 +1878,9 @@ def optimise(self): if self.debug: self.log(f">>> soc_now: {self.soc_now}") self.log(f">>> x: {x}") - self.log(f">>> Original: {x.loc[x.loc[: self.static.index[0]].index[-1] :]}") + self.log( + f">>> Original: {x.loc[x.loc[: self.static.index[0]].index[-1] :]}" + ) try: self.soc_now = float(self.soc_now) @@ -1742,7 +1899,9 @@ def optimise(self): x = x.loc[x.loc[: self.static.index[0]].index[-1] :] if self.debug: - self.log(f">>> Fixed : {x.loc[x.loc[: self.static.index[0]].index[-1] :]}") + self.log( + f">>> Fixed : {x.loc[x.loc[: self.static.index[0]].index[-1] :]}" + ) x = pd.concat( [ @@ -1782,9 +1941,7 @@ def optimise(self): self.log("") if self.get_config("use_solar", True): - str_log = ( - f'Optimising for Solcast {self.get_config("solcast_confidence_level")}% confidence level forecast' - ) + str_log = f'Optimising for Solcast 
{self.get_config("solcast_confidence_level")}% confidence level forecast' else: str_log = "Optimising without Solar" @@ -1838,13 +1995,17 @@ def optimise(self): cost_today = self._cost_actual().sum() self.summary_costs = { "Base": { - "cost": ((self.optimised_cost["Base"].sum() + cost_today) / 100).round(2), + "cost": ((self.optimised_cost["Base"].sum() + cost_today) / 100).round( + 2 + ), "Selected": "", } } for case in cases: str_log = f" {f'Optimised cost ({case}):':40s} {self.optimised_cost[case].sum():6.1f}p" - self.summary_costs[case] = {"cost": ((self.optimised_cost[case].sum() + cost_today) / 100).round(2)} + self.summary_costs[case] = { + "cost": ((self.optimised_cost[case].sum() + cost_today) / 100).round(2) + } if case == self.selected_case: self.summary_costs[case]["Selected"] = " <=== Current Setup" else: @@ -1896,13 +2057,20 @@ def optimise(self): status = self.inverter.status self._log_inverterstatus(status) - time_to_slot_start = (self.charge_start_datetime - pd.Timestamp.now(self.tz)).total_seconds() / 60 - time_to_slot_end = (self.charge_end_datetime - pd.Timestamp.now(self.tz)).total_seconds() / 60 + time_to_slot_start = ( + self.charge_start_datetime - pd.Timestamp.now(self.tz) + ).total_seconds() / 60 + time_to_slot_end = ( + self.charge_end_datetime - pd.Timestamp.now(self.tz) + ).total_seconds() / 60 # if len(self.windows) > 0: if ( (time_to_slot_start > 0) - and (time_to_slot_start < self.get_config("optimise_frequency_minutes")) + and ( + time_to_slot_start + < self.get_config("optimise_frequency_minutes") + ) and (len(self.windows) > 0) ) or (self.get_config("id_battery_soc") < self.get_config("sleep_soc")): # Next slot starts before the next optimiser run. This implies we are not currently in @@ -1913,7 +2081,9 @@ def optimise(self): f"Current SOC of {self.get_config('id_battery_soc'):0.1f}% is less than battery_sleep SOC of {self.get_config('sleep_soc'):0.1f}%" ) elif len(self.windows) > 0: - self.log(f"Next charge/discharge window starts in {time_to_slot_start:0.1f} minutes.") + self.log( + f"Next charge/discharge window starts in {time_to_slot_start:0.1f} minutes." + ) else: self.log("No charge/discharge windows planned.") @@ -1941,7 +2111,10 @@ def optimise(self): elif ( (time_to_slot_start <= 0) - and (time_to_slot_start < self.get_config("optimise_frequency_minutes")) + and ( + time_to_slot_start + < self.get_config("optimise_frequency_minutes") + ) and (len(self.windows) > 0) ): # We are currently in a charge/discharge slot @@ -1949,8 +2122,13 @@ def optimise(self): # If the current slot is a Hold SOC slot and we aren't holding then we need to # enable Hold SOC if self.hold and self.hold[0]["active"]: - if not status["hold_soc"]["active"] or status["hold_soc"]["soc"] != self.hold[0]["soc"]: - self.log(f" Enabling SOC hold at SOC of {self.hold[0]['soc']:0.0f}%") + if ( + not status["hold_soc"]["active"] + or status["hold_soc"]["soc"] != self.hold[0]["soc"] + ): + self.log( + f" Enabling SOC hold at SOC of {self.hold[0]['soc']:0.0f}%" + ) self.inverter.hold_soc( enable=True, soc=self.hold[0]["soc"], @@ -1958,10 +2136,14 @@ def optimise(self): end=self.charge_end_datetime, ) else: - self.log(f" Inverter already holding SOC of {self.hold[0]['soc']:0.0f}%") + self.log( + f" Inverter already holding SOC of {self.hold[0]['soc']:0.0f}%" + ) else: - self.log(f"Current charge/discharge window ends in {time_to_slot_end:0.1f} minutes.") + self.log( + f"Current charge/discharge window ends in {time_to_slot_end:0.1f} minutes." 
+ ) if self.charge_power > 0: if not status["charge"]["active"]: @@ -2048,8 +2230,10 @@ def optimise(self): if len(self.windows) > 0: if ( direction == "charge" - and self.charge_start_datetime > status["discharge"]["start"] - and status["discharge"]["start"] != status["discharge"]["end"] + and self.charge_start_datetime + > status["discharge"]["start"] + and status["discharge"]["start"] + != status["discharge"]["end"] ): str_log += " but inverter has a discharge slot before then. Disabling discharge." self.log(str_log) @@ -2147,7 +2331,9 @@ def _create_windows(self): self.windows = pd.concat([windows, self.windows]).sort_values("start") tolerance = self.get_config("forced_power_group_tolerance") if tolerance > 0: - self.windows["forced"] = ((self.windows["forced"] / tolerance).round(0) * tolerance).astype(int) + self.windows["forced"] = ( + (self.windows["forced"] / tolerance).round(0) * tolerance + ).astype(int) self.windows["soc"] = self.windows["soc"].round(0).astype(int) self.windows["soc_end"] = self.windows["soc_end"].round(0).astype(int) @@ -2156,7 +2342,10 @@ def _create_windows(self): if self.config["supports_hold_soc"]: self.log("Checking for Hold SOC slots") self.windows.loc[ - ((self.windows["soc_end"] - self.windows["soc"]).abs() < HOLD_TOLERANCE) + ( + (self.windows["soc_end"] - self.windows["soc"]).abs() + < HOLD_TOLERANCE + ) & (self.windows["soc"] > self.get_config("maximum_dod_percent")), "hold_soc", ] = "<=" @@ -2174,7 +2363,9 @@ def _create_windows(self): self.charge_current = self.charge_power / voltage else: self.charge_current = None - self.charge_start_datetime = self.windows["start"].iloc[0].tz_convert(self.tz) + self.charge_start_datetime = ( + self.windows["start"].iloc[0].tz_convert(self.tz) + ) self.charge_end_datetime = self.windows["end"].iloc[0].tz_convert(self.tz) self.charge_target_soc = self.windows["soc_end"].iloc[0] self.hold = [ @@ -2206,7 +2397,9 @@ def _log_inverterstatus(self, status): self.log(f" {s:18s}:") for x in status[s]: if isinstance(status[s][x], pd.Timestamp): - self.log(f" {x:16s}: {status[s][x].strftime(DATE_TIME_FORMAT_SHORT)}") + self.log( + f" {x:16s}: {status[s][x].strftime(DATE_TIME_FORMAT_SHORT)}" + ) else: self.log(f" {x:16s}: {status[s][x]}") self.log("") @@ -2230,7 +2423,10 @@ def write_cost( cost_today = self._cost_actual() midnight = pd.Timestamp.now(tz="UTC").normalize() + pd.Timedelta(24, "hours") df = df.fillna(0).round(2) - df["period_start"] = df.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + ":00" + df["period_start"] = ( + df.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + + ":00" + ) cols = [ "soc", "forced", @@ -2248,7 +2444,10 @@ def write_cost( cost["cumulative_cost"] = cost["cost"].cumsum() for d in [df, cost]: - d["period_start"] = d.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + ":00" + d["period_start"] = ( + d.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + + ":00" + ) state = round((cost["cost"].sum()) / 100, 2) @@ -2259,12 +2458,17 @@ def write_cost( "state_class": "measurement", "unit_of_measurement": "GBP", "cost_today": round( - (cost["cost"].loc[: midnight - pd.Timedelta(30, "minutes")].sum()) / 100, + (cost["cost"].loc[: midnight - pd.Timedelta(30, "minutes")].sum()) + / 100, 2, ), "cost_tomorrow": round((cost["cost"].loc[midnight:].sum()) / 100, 2), } - | {col: df[["period_start", col]].to_dict("records") for col in cols if col in df.columns} + | { + col: df[["period_start", col]].to_dict("records") + for col in cols 
+ if col in df.columns + } | {"cost": cost[["period_start", "cumulative_cost"]].to_dict("records")} | attributes ) @@ -2375,7 +2579,9 @@ def _write_output(self): def load_solcast(self): if not self.get_config("use_solar", True): df = pd.DataFrame( - index=pd.date_range(pd.Timestamp.now(tz="UTC").normalize(), periods=96, freq="30min"), + index=pd.date_range( + pd.Timestamp.now(tz="UTC").normalize(), periods=96, freq="30min" + ), data={"Solcast": 0, "Solcast_p10": 0, "Solcast_p90": 0, "weighted": 0}, ) return df @@ -2383,12 +2589,12 @@ def load_solcast(self): if self.debug: self.log("Getting Solcast data") try: - solar = self.get_state_retry(self.config["id_solcast_today"], attribute="all")["attributes"][ - "detailedForecast" - ] - solar += self.get_state_retry(self.config["id_solcast_tomorrow"], attribute="all")["attributes"][ - "detailedForecast" - ] + solar = self.get_state_retry( + self.config["id_solcast_today"], attribute="all" + )["attributes"]["detailedForecast"] + solar += self.get_state_retry( + self.config["id_solcast_tomorrow"], attribute="all" + )["attributes"]["detailedForecast"] except Exception as e: self.log(f"Failed to get solcast attributes: {e}") @@ -2427,7 +2633,9 @@ def load_solcast(self): self.log("") return - def _get_hass_power_from_daily_kwh(self, entity_id, start=None, end=None, days=None, log=False): + def _get_hass_power_from_daily_kwh( + self, entity_id, start=None, end=None, days=None, log=False + ): if days is None: days = (pd.Timestamp.now(tz="UTC") - start).days + 1 @@ -2442,8 +2650,14 @@ def _get_hass_power_from_daily_kwh(self, entity_id, start=None, end=None, days=N x = df.diff().clip(0).fillna(0).cumsum() + df.iloc[0] x.index = x.index.round("1s") x = x[~x.index.duplicated()] - y = -pd.concat([x.resample("1s").interpolate().resample("30min").asfreq(), x.iloc[-1:]]).diff(-1) - dt = y.index.diff().total_seconds() / pd.Timedelta("60min").total_seconds() / 1000 + y = -pd.concat( + [x.resample("1s").interpolate().resample("30min").asfreq(), x.iloc[-1:]] + ).diff(-1) + dt = ( + y.index.diff().total_seconds() + / pd.Timedelta("60min").total_seconds() + / 1000 + ) df = y[1:-1] / dt[2:] if start is not None: @@ -2463,7 +2677,9 @@ def load_consumption(self, start, end): if self.get_config("use_consumption_history"): time_now = pd.Timestamp.now(tz="UTC") if (start < time_now) and (end < time_now): - self.log(" - Start and end are both in past so actuals will be used with no weighting") + self.log( + " - Start and end are both in past so actuals will be used with no weighting" + ) days = (time_now - start).days + 1 else: days = int(self.get_config("consumption_history_days")) @@ -2478,7 +2694,11 @@ def load_consumption(self, start, end): if not isinstance(entity_ids, list): entity_ids = [entity_ids] - entity_ids = [entity_id for entity_id in entity_ids if self.entity_exists(entity_id)] + entity_ids = [ + entity_id + for entity_id in entity_ids + if self.entity_exists(entity_id) + ] if ( (len(entity_ids) == 0) @@ -2529,8 +2749,12 @@ def load_consumption(self, start, end): self.log(f" - {days} days was expected. 
{str_days}") - if (len(self.zappi_entities) > 0) and (self.get_config("ev_charger", "None") == "Zappi"): - ev_power = self._get_zappi(start=df.index[0], end=df.index[-1], log=True) + if (len(self.zappi_entities) > 0) and ( + self.get_config("ev_charger", "None") == "Zappi" + ): + ev_power = self._get_zappi( + start=df.index[0], end=df.index[-1], log=True + ) if len(ev_power) > 0: self.log("") self.log(f" Deducting EV consumption of {ev_power.sum()/2000}") @@ -2553,21 +2777,31 @@ def load_consumption(self, start, end): dfx = None if self.get_config("ev_part_of_house_load", False): - self.log("EV charger is seen as house load, so subtracting EV charging from Total consumption") + self.log( + "EV charger is seen as house load, so subtracting EV charging from Total consumption" + ) df_EV_Total = pd.concat( [ev_power, df], axis=1 ) # concatenate total consumption and ev consumption into a single dataframe (as they are different lengths) df_EV_Total.columns = ["EV", "Total"] # Set column names - df_EV_Total = df_EV_Total.fillna(0) # fill any missing values with 0 + df_EV_Total = df_EV_Total.fillna( + 0 + ) # fill any missing values with 0 # self.log("Attempt to concatenate is") # self.log(df_EV_Total) # self.log("Attempt to concatenate is") # self.log(df_EV_Total.to_string()) - df_EV = df_EV_Total["EV"].squeeze() # Extract EV consumption to Series - df_Total = df_EV_Total["Total"].squeeze() # Extract total consumption to Series - df = df_Total - df_EV # Substract EV consumption from Total Consumption + df_EV = df_EV_Total[ + "EV" + ].squeeze() # Extract EV consumption to Series + df_Total = df_EV_Total[ + "Total" + ].squeeze() # Extract total consumption to Series + df = ( + df_Total - df_EV + ) # Substract EV consumption from Total Consumption if self.debug: self.log("Result of subtraction is") self.log(df.to_string()) @@ -2581,7 +2815,9 @@ def load_consumption(self, start, end): dfx = pd.Series(index=df.index, data=df.to_list()) # Group by time and take the mean - df = df.groupby(df.index.time).aggregate(self.get_config("consumption_grouping")) + df = df.groupby(df.index.time).aggregate( + self.get_config("consumption_grouping") + ) df.name = "consumption" if self.debug: @@ -2592,10 +2828,14 @@ def load_consumption(self, start, end): temp = pd.DataFrame(index=index) temp["time"] = temp.index.time - consumption_mean = temp.merge(df, "left", left_on="time", right_index=True)["consumption"] + consumption_mean = temp.merge( + df, "left", left_on="time", right_index=True + )["consumption"] if days >= 7: - consumption_dow = self.get_config("day_of_week_weighting") * dfx.iloc[: len(temp)] + consumption_dow = ( + self.get_config("day_of_week_weighting") * dfx.iloc[: len(temp)] + ) if len(consumption_dow) != len(consumption_mean): self.log(">>> Inconsistent lengths in consumption arrays") self.log(f">>> dow : {len(consumption_dow)}") @@ -2610,11 +2850,14 @@ def load_consumption(self, start, end): consumption["consumption"] += pd.Series( consumption_dow.to_numpy() - + consumption_mean.to_numpy() * (1 - self.get_config("day_of_week_weighting")), + + consumption_mean.to_numpy() + * (1 - self.get_config("day_of_week_weighting")), index=consumption_mean.index, ) else: - self.log(f" - Ignoring 'Day of Week Weighting' because only {days} days of history is available") + self.log( + f" - Ignoring 'Day of Week Weighting' because only {days} days of history is available" + ) consumption["consumption"] = consumption_mean if len(entity_ids) > 0: @@ -2625,7 +2868,9 @@ def load_consumption(self, start, end): else: 
daily_kwh = self.get_config("daily_consumption_kwh") - self.log(f" - Creating consumption based on daily estimate of {daily_kwh} kWh") + self.log( + f" - Creating consumption based on daily estimate of {daily_kwh} kWh" + ) if self.get_config("shape_consumption_profile"): self.log(" and typical usage profile.") @@ -2640,15 +2885,21 @@ def load_consumption(self, start, end): daily.index = pd.to_datetime(daily.index, unit="h").time consumption["time"] = consumption.index.time consumption = pd.DataFrame( - consumption.merge(daily, left_on="time", right_index=True)["consumption_y"] + consumption.merge(daily, left_on="time", right_index=True)[ + "consumption_y" + ] ).set_axis(["consumption"], axis=1) else: self.log(" and flat usage profile.") - consumption["consumption"] = self.get_config("daily_consumption_kwh") * 1000 / 24 + consumption["consumption"] = ( + self.get_config("daily_consumption_kwh") * 1000 / 24 + ) self.log(" - Consumption estimated OK") - self.log(f" - Total consumption: {(consumption['consumption'].sum() / 2000):0.1f} kWh") + self.log( + f" - Total consumption: {(consumption['consumption'].sum() / 2000):0.1f} kWh" + ) if self.debug: self.log("Printing final result of routine load_consumption.....") self.log(consumption.to_string()) @@ -2662,7 +2913,9 @@ def _auto_cal(self): solar = self._get_solar(start, end) consumption = self.load_consumption(start, end) grid = self.load_grid(start, end) - soc = self.hass2df(self.config["id_battery_soc"], days=2, freq="30min").loc[start:end] + soc = self.hass2df(self.config["id_battery_soc"], days=2, freq="30min").loc[ + start:end + ] def load_grid(self, start, end): self.log( @@ -2681,7 +2934,14 @@ def load_grid(self, start, end): if self.entity_exists(entity_id): x = self.hass2df(entity_id, days=days) if x is not None: - x = (self.riemann_avg(x).loc[start : end - pd.Timedelta("30min")] / 10).round(0) * 10 * mults[id] + x = ( + ( + self.riemann_avg(x).loc[start : end - pd.Timedelta("30min")] + / 10 + ).round(0) + * 10 + * mults[id] + ) if df is None: df = x else: @@ -2703,9 +2963,13 @@ def _compare_tariffs(self): return consumption = self.load_consumption(start, end) - static = pd.concat([solar, consumption], axis=1).set_axis(["solar", "consumption"], axis=1) + static = pd.concat([solar, consumption], axis=1).set_axis( + ["solar", "consumption"], axis=1 + ) - initial_soc_df = self.hass2df(self.config["id_battery_soc"], days=2, freq="30min") + initial_soc_df = self.hass2df( + self.config["id_battery_soc"], days=2, freq="30min" + ) initial_soc = initial_soc_df.loc[start] base = self.pv_system.flows(initial_soc, static, solar="solar") @@ -2728,7 +2992,9 @@ def _compare_tariffs(self): name = tariff_set["name"] for imp_exp in IMPEXP: code[imp_exp] = tariff_set[f"octopus_{imp_exp}_tariff_code"] - tariffs[imp_exp] = pv.Tariff(code[imp_exp], export=(imp_exp == "export"), host=self) + tariffs[imp_exp] = pv.Tariff( + code[imp_exp], export=(imp_exp == "export"), host=self + ) contracts.append( pv.Contract( @@ -2740,7 +3006,10 @@ def _compare_tariffs(self): ) actual = self._cost_actual(start=start, end=end - pd.Timedelta(30, "minutes")) - static["period_start"] = static.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + ":00" + static["period_start"] = ( + static.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + + ":00" + ) entity_id = f"sensor.{self.prefix}_opt_cost_actual" self.set_state( state=round(actual.sum() / 100, 2), @@ -2751,12 +3020,19 @@ def _compare_tariffs(self): "unit_of_measurement": "GBP", 
"friendly_name": f"PV Opt Comparison Actual", } - | {col: static[["period_start", col]].to_dict("records") for col in ["solar", "consumption"]}, + | { + col: static[["period_start", col]].to_dict("records") + for col in ["solar", "consumption"] + }, ) self.ulog("Net Cost comparison:", underline=None) - self.log(f" {'Tariff':20s} {'Base Cost (GBP)':>20s} {'Optimised Cost (GBP)':>20s} ") - self.log(f" {'------':20s} {'---------------':>20s} {'--------------------':>20s} ") + self.log( + f" {'Tariff':20s} {'Base Cost (GBP)':>20s} {'Optimised Cost (GBP)':>20s} " + ) + self.log( + f" {'------':20s} {'---------------':>20s} {'--------------------':>20s} " + ) self.log(f" {'Actual':20s} {'':20s} {(actual.sum()/100):>20.3f}") cols = [ @@ -2782,7 +3058,10 @@ def _compare_tariffs(self): log=False, ) - opt["period_start"] = opt.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + ":00" + opt["period_start"] = ( + opt.index.tz_convert(self.tz).strftime("%Y-%m-%dT%H:%M:%S%z").str[:-2] + + ":00" + ) attributes = { "state_class": "measurement", @@ -2790,10 +3069,16 @@ def _compare_tariffs(self): "unit_of_measurement": "GBP", "friendly_name": f"PV Opt Comparison {contract.name}", "net_base": round(net_base.sum() / 100, 2), - } | {col: opt[["period_start", col]].to_dict("records") for col in cols if col in opt.columns} + } | { + col: opt[["period_start", col]].to_dict("records") + for col in cols + if col in opt.columns + } net_opt = contract.net_cost(opt, day_ahead=False) - self.log(f" {contract.name:20s} {(net_base.sum()/100):>20.3f} {(net_opt.sum()/100):>20.3f}") + self.log( + f" {contract.name:20s} {(net_base.sum()/100):>20.3f} {(net_opt.sum()/100):>20.3f}" + ) entity_id = f"sensor.{self.prefix}_opt_cost_{contract.name}" self.set_state( state=round(net_opt.sum() / 100, 2), @@ -2817,7 +3102,10 @@ def _get_solar(self, start, end): if self.entity_exists(entity_id): x = self.hass2df(entity_id, days=days) if x is not None: - x = (self.riemann_avg(x).loc[start : end - pd.Timedelta("30min")] / 10).round(0) * 10 + x = ( + self.riemann_avg(x).loc[start : end - pd.Timedelta("30min")] + / 10 + ).round(0) * 10 if df is None: df = x else: @@ -2839,16 +3127,18 @@ def _check_tariffs_vs_bottlecap(self): else: df = pd.DataFrame( - self.get_state_retry(self.bottlecap_entities[direction], attribute=("rates")) + self.get_state_retry( + self.bottlecap_entities[direction], attribute=("rates") + ) ).set_index("start")["value_inc_vat"] df.index = pd.to_datetime(df.index, utc=True) df *= 100 df = pd.concat( [ df, - self.contract.tariffs[direction].to_df(start=df.index[0], end=df.index[-1], day_ahead=False)[ - "unit" - ], + self.contract.tariffs[direction].to_df( + start=df.index[0], end=df.index[-1], day_ahead=False + )["unit"], ], axis=1, ).set_axis(["bottlecap", "pv_opt"], axis=1) @@ -2856,7 +3146,13 @@ def _check_tariffs_vs_bottlecap(self): # Drop any Savings Sessions for id in self.saving_events: - df = df.drop(df[self.saving_events[id]["start"] : self.saving_events[id]["end"]].index[:-1]) + df = df.drop( + df[ + self.saving_events[id]["start"] : self.saving_events[id][ + "end" + ] + ].index[:-1] + ) pvopt_price = df["pv_opt"].mean() bottlecap_price = df["bottlecap"].mean() @@ -2891,7 +3187,10 @@ def _list_entities(self, domains=["select", "number", "sensor"]): states = self.get_state_retry(domain) states = {k: states[k] for k in states if self.device_name in k} for entity_id in states: - x = entity_id + f" ({states[entity_id]['attributes'].get('device_class',None)}):" + x = ( + entity_id + + f" 
({states[entity_id]['attributes'].get('device_class',None)}):" + ) x = f" {x:60s}" if domain != "select": @@ -2953,7 +3252,9 @@ def write_and_poll_value(self, entity_id, value, tolerance=0.0, verbose=False): if diff > tolerance: changed = True try: - self.call_service("number/set_value", entity_id=entity_id, value=str(value)) + self.call_service( + "number/set_value", entity_id=entity_id, value=str(value) + ) written = False retries = 0 @@ -2977,7 +3278,9 @@ def set_select(self, item, state): if state is not None: entity_id = self.config[f"id_{item}"] if self.get_state_retry(entity_id=entity_id) != state: - self.call_service("select/select_option", entity_id=entity_id, option=state) + self.call_service( + "select/select_option", entity_id=entity_id, option=state + ) self.rlog(f"Setting {entity_id} to {state}") def get_state_retry(self, *args, **kwargs): @@ -3016,7 +3319,11 @@ def riemann_avg(self, x, freq="30min"): dt = x.index.diff().total_seconds().fillna(0) integral = (dt * x.shift(1)).fillna(0).cumsum().resample(freq).last() - avg = (integral.diff().shift(-1)[:-1] / pd.Timedelta(freq).total_seconds()).fillna(0).round(1) + avg = ( + (integral.diff().shift(-1)[:-1] / pd.Timedelta(freq).total_seconds()) + .fillna(0) + .round(1) + ) # self.log(avg) return avg
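
# ---------------------------------------------------------------------------
# Reviewer sketch (illustrative only, not part of the patch): riemann_avg()
# in the final hunk above turns an irregularly sampled power sensor into
# time-weighted 30-minute averages via a left Riemann sum.  A minimal
# standalone equivalent, assuming a pandas Series with a tz-aware
# DatetimeIndex (function and variable names here are hypothetical), trades
# memory for clarity by sample-and-holding onto a 1-second grid and
# averaging each bin:

import pandas as pd


def time_weighted_avg(x: pd.Series, freq: str = "30min") -> pd.Series:
    # Forward-fill onto a 1-second grid (sample-and-hold), then take the mean
    # of each bin: this is the time-weighted (Riemann) average of the signal.
    stepped = x.resample("1s").ffill()
    return stepped.resample(freq).mean().round(1)


# A sensor reporting 1000 W for the first 15 minutes and 0 W afterwards
# averages 500 W over the first half-hour bin and 0 W thereafter.
idx = pd.to_datetime(
    ["2024-01-01 00:00", "2024-01-01 00:15", "2024-01-01 01:00"], utc=True
)
print(time_weighted_avg(pd.Series([1000.0, 0.0, 0.0], index=idx)))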
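
# ---------------------------------------------------------------------------
# Reviewer sketch (illustrative only, not part of the patch):
# _get_hass_power_from_daily_kwh() earlier in the patch converts Home
# Assistant "daily kWh" sensors, which reset to zero at midnight, into
# average power per 30-minute slot.  A condensed version of the same idea,
# assuming a pandas Series of cumulative daily kWh readings (names here are
# hypothetical):

import pandas as pd


def power_from_daily_kwh(kwh: pd.Series, freq: str = "30min") -> pd.Series:
    # Daily-total sensors drop back to 0 at midnight; clipping the diff at 0
    # discards the resets and cumsum() rebuilds a monotonic kWh counter.
    cumulative = kwh.diff().clip(lower=0).fillna(0).cumsum()
    # Interpolate onto a fine grid, sample the bin edges, and difference to
    # get the energy (kWh) used within each bin.
    per_bin_kwh = (
        cumulative.resample("1s").interpolate().resample(freq).asfreq().diff().shift(-1)
    )
    # Convert kWh per bin to average power in W: kWh / hours * 1000.
    hours = pd.Timedelta(freq).total_seconds() / 3600
    return (per_bin_kwh * 1000 / hours).dropna()


# Example: 1 kWh spread evenly over 00:00-06:00 is roughly 167 W per slot.
idx = pd.to_datetime(["2024-01-01 00:00", "2024-01-01 06:00"], utc=True)
print(power_from_daily_kwh(pd.Series([0.0, 1.0], index=idx)).head(3))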
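
# ---------------------------------------------------------------------------
# Reviewer sketch (illustrative only, not part of the patch): the reformatted
# write_and_poll_value() above follows a write-then-verify pattern: push a
# value to an inverter number entity, then poll the entity until it reads
# back within tolerance or the retry budget is spent.  A generic outline of
# that pattern (callables and names here are hypothetical, not the app's API):

import time
from typing import Callable


def write_and_poll(
    write: Callable[[float], None],
    read: Callable[[], float],
    value: float,
    tolerance: float = 0.0,
    retries: int = 5,
    delay: float = 0.5,
) -> bool:
    # Send the new value, then poll until the device echoes it back
    # (within tolerance) or the retries are exhausted.
    write(value)
    for _ in range(retries):
        time.sleep(delay)
        if abs(read() - value) <= tolerance:
            return True
    return False


# Usage idea: write_and_poll(lambda v: print("set", v), lambda: 50.0, 50.0)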