Merge pull request #53 from fboundy/51-not-loading-custom-tariff-correctly

3.2.2 - Fixes for FLUX
fboundy authored Dec 14, 2023
2 parents 26161f6 + 9104d1e commit c1b71dc
Showing 2 changed files with 38 additions and 23 deletions.
36 changes: 23 additions & 13 deletions apps/pv_opt/pv_opt.py
@@ -23,7 +23,7 @@
#
USE_TARIFF = True

VERSION = "3.2.1"
VERSION = "3.2.2"

DATE_TIME_FORMAT_LONG = "%Y-%m-%d %H:%M:%S%z"
DATE_TIME_FORMAT_SHORT = "%d-%b %H:%M"
@@ -924,6 +924,7 @@ def _expose_configs(self):
)
and ("id_" not in item)
and ("alt_" not in item)
and ("auto" not in item)
and "domain" in defaults[item]
]
for item in untracked_items:
@@ -950,7 +951,7 @@ def _expose_configs(self):
conf = conf | MQTT_CONFIGS[domain]

conf_topic = f"homeassistant/{domain}/{id}/config"
self.mqtt.mqtt_publish(conf_topic, dumps(conf), retain=True)
self.mqtt.mqtt_publish(conf_topic, dumps(conf), retain=False)

if item == "battery_capacity_Wh":
capacity = self._estimate_capacity()
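
Note: the change in this hunk publishes the Home Assistant MQTT discovery config with retain=False instead of retain=True, presumably so the broker no longer replays stale config payloads to late subscribers. A minimal sketch of the same publish pattern, assuming an AppDaemon MQTT app; the entity name and topic below are illustrative, not from the repository.

# Minimal sketch of a non-retained Home Assistant discovery publish,
# assuming the AppDaemon MQTT plugin; names and topics are illustrative.
from json import dumps

import mqttapi as mqtt


class DiscoveryDemo(mqtt.Mqtt):
    def initialize(self):
        conf = {
            "name": "PV Opt Demo Sensor",
            "state_topic": "pv_opt/demo/state",
        }
        conf_topic = "homeassistant/sensor/pv_opt_demo/config"
        # retain=False: the broker does not store the config message, so a
        # stale retained payload cannot keep re-creating the entity later.
        self.mqtt_publish(conf_topic, dumps(conf), retain=False)
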
@@ -1342,7 +1343,12 @@ def _log_inverter_status(self, status):
self.log(f" {x:16s}: {status[s][x]}")
self.log("")

def write_to_hass(self, entity, state, attributes):
def write_to_hass(self, entity, state, attributes={}):
# self.log(f">>> entity:{entity}")
# self.log(f">>> state:{state}")
# for k in attributes:
# self.log(f">>> {k}:{attributes[k]}")

if not self.entity_exists(entity_id=entity):
self.log(f"Creating HA Entity {entity}")
id = entity.replace("sensor.", "")
@@ -1354,13 +1360,11 @@ def write_to_hass(self, entity, state, attributes):
}

conf_topic = f"homeassistant/self/{id}/config"
self.mqtt.mqtt_publish(conf_topic, dumps(conf), retain=True)
self.mqtt.mqtt_publish(conf_topic, dumps(conf), retain=False)

try:
self.my_entity = self.get_entity(entity)
self.my_entity.set_state(state=state, attributes=attributes)
if self.debug:
self.log(f"Output written to {self.my_entity}")
self.set_state(state=state, entity_id=entity, attributes=attributes)
self.log(f"Output written to {entity}")

except Exception as e:
self.log(f"Couldn't write to entity {entity}: {e}")
@@ -1384,10 +1388,10 @@ def write_cost(self, name, entity, cost, df):
+ ":00"
)

self.write_to_hass(
entity=entity,
state=round((cost["cost"].sum()) / 100, 2),
attributes={
state = round((cost["cost"].sum()) / 100, 2)

attributes = (
{
"friendly_name": name,
"unit_of_measurement": "GBP",
"cost_today": round(
@@ -1401,7 +1405,13 @@
for col in cols
if col in df.columns
}
| {"cost": cost[["period_start", "cumulative_cost"]].to_dict("records")},
| {"cost": cost[["period_start", "cumulative_cost"]].to_dict("records")}
)

self.write_to_hass(
entity=entity,
state=state,
attributes=attributes,
)

def _write_output(self):
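
Note: in write_cost the state and attributes are now built as local variables before a single write_to_hass call, which keeps the dict-union (|) of static attributes and per-period records readable. A toy, self-contained illustration of that build-then-write shape; the column names and numbers are invented for the example.

# Toy illustration of the build-then-write pattern; data is invented.
import pandas as pd

cost = pd.DataFrame(
    {
        "period_start": ["2023-12-14T00:00", "2023-12-14T00:30"],
        "cost": [12.0, 8.0],  # pence per half-hour
        "cumulative_cost": [12.0, 20.0],
    }
)

state = round(cost["cost"].sum() / 100, 2)  # pence -> GBP

# Dict union (Python 3.9+) merges the fixed attributes with the records list.
attributes = {"friendly_name": "Demo cost", "unit_of_measurement": "GBP"} | {
    "cost": cost[["period_start", "cumulative_cost"]].to_dict("records")
}

print(state, attributes["cost"][0])
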
25 changes: 15 additions & 10 deletions apps/pv_opt/pvpy.py
@@ -213,10 +213,15 @@ def to_df(self, start=None, end=None):
df.name = "unit"

if not self.export:
x = pd.DataFrame(self.fixed).set_index("valid_from")["value_inc_vat"]
x = (
pd.DataFrame(self.fixed)
.set_index("valid_from")["value_inc_vat"]
.sort_index()
)
x.index = pd.to_datetime(x.index)
newindex = pd.date_range(x.index[0], df.index[-1], freq="30T")
x = x.reindex(newindex).sort_index().ffill().loc[df.index[0] :]
x = x.reindex(newindex).sort_index()
x = x.ffill().loc[df.index[0] :]
df = pd.concat([df, x], axis=1).set_axis(["unit", "fixed"], axis=1)

mask = df.index.time != pd.Timestamp("00:00", tz="UTC").time()
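
Note: this hunk is the core of the custom-tariff fix. The fixed (standing) charges are sorted by valid_from before being reindexed onto the half-hourly grid and forward-filled, so out-of-order entries can no longer put the wrong charge at the start of the series. A self-contained illustration of the pattern with invented data, using the "30min" alias in place of the deprecated "30T".

# Toy illustration of sorting fixed charges before reindex + ffill;
# dates and values are invented, only the pandas pattern mirrors the diff.
import pandas as pd

fixed = [
    {"valid_from": "2023-12-10T00:00Z", "value_inc_vat": 45.0},
    {"valid_from": "2023-12-01T00:00Z", "value_inc_vat": 38.0},  # out of order
]

x = pd.DataFrame(fixed).set_index("valid_from")["value_inc_vat"].sort_index()
x.index = pd.to_datetime(x.index)

# Half-hourly grid from the (now correctly ordered) first charge onwards.
end = pd.Timestamp("2023-12-14 23:30", tz="UTC")
newindex = pd.date_range(x.index[0], end, freq="30min")
x = x.reindex(newindex).sort_index()
x = x.ffill()  # each half-hour carries the most recent standing charge

print(x.loc["2023-12-05"].iloc[0], x.loc["2023-12-12"].iloc[0])  # 38.0 45.0
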
@@ -460,7 +465,7 @@ def flows(self, initial_soc, static_flows, slots=[], soc_now=None, **kwargs):
timed_slot_flows = pd.Series(index=df.index, data=0)

for t, c in slots:
timed_slot_flows.loc[t] += c
timed_slot_flows.loc[t] += int(c)

chg_mask = timed_slot_flows != 0
battery_flows[chg_mask] = timed_slot_flows[chg_mask]
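
Note: the only change here casts each slot amount to int before the in-place .loc addition. One plausible motivation is that slot powers can arrive as numpy or float scalars, and the cast keeps the integer flow series' dtype stable. A tiny invented example of the same coercion:

# Tiny invented example of coercing a numpy scalar before a .loc += update.
import numpy as np
import pandas as pd

idx = pd.date_range("2023-12-14 17:00", periods=4, freq="30min", tz="UTC")
timed_slot_flows = pd.Series(index=idx, data=0)

t, c = idx[1], np.float64(-3000.0)  # e.g. a forced-discharge slot, in W
timed_slot_flows.loc[t] += int(c)   # cast avoids mixing dtypes in the int series

print(timed_slot_flows)
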
@@ -643,20 +648,20 @@ def optimised_force(self, initial_soc, static_flows, contract: Contract, **kwarg
x = x[x["soc_end"] <= 97]

search_window = x.index
str_log = f"{i:>2d} {available.sum():>2d} {max_slot.strftime(TIME_FORMAT)} costs {max_import_cost:5.2f}p. "
str_log += f"Energy: {round_trip_energy_required:5.2f} kWh. "
str_log = f"{max_slot.strftime(TIME_FORMAT)}: {round_trip_energy_required:5.2f} kWh at {max_import_cost:6.2f}p. "
if len(search_window) > 0:
str_log += f"Window: [{search_window[0].strftime(TIME_FORMAT)}-{search_window[-1].strftime(TIME_FORMAT)}] "
# str_log += f"Window: [{search_window[0].strftime(TIME_FORMAT)}-{search_window[-1].strftime(TIME_FORMAT)}] "
pass
else:
str_log = "No available window."
# str_log = "No available window."
done = True
if len(x) > 0:
min_price = x["import"].min()
start_window = x[x["import"] == min_price].index[0]

cost_at_min_price = round_trip_energy_required * min_price
str_log += f"Min price at {start_window.strftime(TIME_FORMAT)}: {min_price:5.2f}p/kWh costing {cost_at_min_price:5.2f} "
str_log += f"SOC: {x.loc[start_window]['soc']:5.1f}%->{x.loc[start_window]['soc_end']:5.1f}% "
str_log += f"<==> {start_window.strftime(TIME_FORMAT)}: {min_price:5.2f}p/kWh {cost_at_min_price:5.2f}p "
str_log += f" SOC: {x.loc[start_window]['soc']:5.1f}%->{x.loc[start_window]['soc_end']:5.1f}% "
if pd.Timestamp.now() > start_window.tz_localize(None):
str_log += "* "
factor = (
@@ -701,7 +706,7 @@ def optimised_force(self, initial_soc, static_flows, contract: Contract, **kwarg
)
str_log += f"New SOC: {df.loc[start_window]['soc']:5.1f}%->{df.loc[start_window]['soc_end']:5.1f}% "
net_cost_opt = contract.net_cost(df).sum()
str_log += f"Net: {net_cost_opt:5.1f}"
str_log += f"Net: {net_cost_opt:6.1f}"
self.log(str_log)
else:
available[max_slot] = False
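
Note: the remaining edits reshape the optimiser's per-slot log lines, mostly by switching to fixed-width f-string specs (for example :6.1f instead of :5.1f for the net cost), presumably so the columns stay aligned even for negative totals. A tiny invented example of the format specs in use:

# Tiny invented example of the fixed-width f-string specs used in the log lines.
min_price, cost_at_min_price, net_cost_opt = 9.5, 14.25, -123.4

str_log = f"{min_price:5.2f}p/kWh {cost_at_min_price:5.2f}p "
str_log += f"Net: {net_cost_opt:6.1f}"
print(str_log)  # ' 9.50p/kWh 14.25p Net: -123.4'
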
