Fix - Fixed missing attributes in the sensors when using custom IDs
davidusb-geek committed Jun 29, 2023
1 parent 5ebd808 commit 6eb007b
Showing 7 changed files with 76 additions and 27 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -4,9 +4,11 @@
### Improvement
- Added support for data reconstruction when missing values on last window for ML forecaster prediction.
- Added treatment of SOCtarget passed at runtime for day-ahead optimization.
- Added publish_prefix key to pass a common prefix to all published data.
### Fix
- Patched sensor rounding problem.
- Bump myst-parser from 1.0.0 to 2.0.0
- Fixed missing attributes in the sensors when using custom IDs.

## [0.4.12] - 2023-06-03
### Improvement
2 changes: 2 additions & 0 deletions README.md
@@ -355,6 +355,8 @@ Here is the list of the other additional dictionnary keys that can be passed at

- `SOCtarget` for the desired target value of initial and final SOC.

- `publish_prefix` use this key to pass a common prefix to all published data.

## A naive Model Predictive Controller

A MPC controller was introduced in v0.3.0. This is an informal/naive representation of a MPC controller.
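The `publish_prefix` key documented in the README hunk above is passed at runtime like any other entry of `passed_data`. A minimal sketch of doing so from Python, assuming a standalone EMHASS instance listening on localhost:5000 and the publish-data action endpoint (both are assumptions about a typical deployment, not part of this commit):

# Sketch: pass publish_prefix as a runtime parameter when triggering a publish.
# URL, port and endpoint name are assumptions; adapt them to your own setup.
import requests

runtimeparams = {"publish_prefix": "emhass_"}  # every published entity_id gets this prefix

response = requests.post(
    "http://localhost:5000/action/publish-data",  # assumed standalone EMHASS endpoint
    json=runtimeparams,
    timeout=10,
)
print(response.status_code)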
44 changes: 33 additions & 11 deletions src/emhass/command_line.py
@@ -364,6 +364,7 @@ def forecast_model_predict(input_data_dict: dict, logger: logging.Logger,
model_predict_entity_id = input_data_dict['params']['passed_data']['model_predict_entity_id']
model_predict_unit_of_measurement = input_data_dict['params']['passed_data']['model_predict_unit_of_measurement']
model_predict_friendly_name = input_data_dict['params']['passed_data']['model_predict_friendly_name']
publish_prefix = input_data_dict['params']['passed_data']['publish_prefix']
if model_predict_publish:
# Estimate the current index
now_precise = datetime.now(input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0)
@@ -380,7 +381,8 @@
model_predict_entity_id,
model_predict_unit_of_measurement,
model_predict_friendly_name,
from_mlforecaster=True)
type_var = 'mlforecaster',
publish_prefix=publish_prefix)
return predictions

def forecast_model_tune(input_data_dict: dict, logger: logging.Logger,
@@ -462,19 +464,25 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
idx_closest = opt_res_latest.index.get_indexer([now_precise], method='bfill')[0]
if idx_closest == -1:
idx_closest = opt_res_latest.index.get_indexer([now_precise], method='nearest')[0]
# Publish PV forecast
# Publish the data
params = json.loads(input_data_dict['params'])
publish_prefix = params['passed_data']['publish_prefix']
# Publish PV forecast
custom_pv_forecast_id = params['passed_data']['custom_pv_forecast_id']
input_data_dict['rh'].post_data(opt_res_latest['P_PV'], idx_closest,
custom_pv_forecast_id["entity_id"],
custom_pv_forecast_id["unit_of_measurement"],
custom_pv_forecast_id["friendly_name"])
custom_pv_forecast_id["friendly_name"],
type_var = 'power',
publish_prefix=publish_prefix)
# Publish Load forecast
custom_load_forecast_id = params['passed_data']['custom_load_forecast_id']
input_data_dict['rh'].post_data(opt_res_latest['P_Load'], idx_closest,
custom_load_forecast_id["entity_id"],
custom_load_forecast_id["unit_of_measurement"],
custom_load_forecast_id["friendly_name"])
custom_load_forecast_id["friendly_name"],
type_var = 'power',
publish_prefix=publish_prefix)
cols_published = ['P_PV', 'P_Load']
# Publish deferrable loads
custom_deferrable_forecast_id = params['passed_data']['custom_deferrable_forecast_id']
@@ -485,7 +493,9 @@
input_data_dict['rh'].post_data(opt_res_latest["P_deferrable{}".format(k)], idx_closest,
custom_deferrable_forecast_id[k]["entity_id"],
custom_deferrable_forecast_id[k]["unit_of_measurement"],
custom_deferrable_forecast_id[k]["friendly_name"])
custom_deferrable_forecast_id[k]["friendly_name"],
type_var = 'deferrable',
publish_prefix=publish_prefix)
cols_published = cols_published+["P_deferrable{}".format(k)]
# Publish battery power
if input_data_dict['opt'].optim_conf['set_use_battery']:
@@ -496,41 +506,53 @@
input_data_dict['rh'].post_data(opt_res_latest['P_batt'], idx_closest,
custom_batt_forecast_id["entity_id"],
custom_batt_forecast_id["unit_of_measurement"],
custom_batt_forecast_id["friendly_name"])
custom_batt_forecast_id["friendly_name"],
type_var = 'batt',
publish_prefix=publish_prefix)
cols_published = cols_published+["P_batt"]
custom_batt_soc_forecast_id = params['passed_data']['custom_batt_soc_forecast_id']
input_data_dict['rh'].post_data(opt_res_latest['SOC_opt']*100, idx_closest,
custom_batt_soc_forecast_id["entity_id"],
custom_batt_soc_forecast_id["unit_of_measurement"],
custom_batt_soc_forecast_id["friendly_name"])
custom_batt_soc_forecast_id["friendly_name"],
type_var = 'SOC',
publish_prefix=publish_prefix)
cols_published = cols_published+["SOC_opt"]
# Publish grid power
custom_grid_forecast_id = params['passed_data']['custom_grid_forecast_id']
input_data_dict['rh'].post_data(opt_res_latest['P_grid'], idx_closest,
custom_grid_forecast_id["entity_id"],
custom_grid_forecast_id["unit_of_measurement"],
custom_grid_forecast_id["friendly_name"])
custom_grid_forecast_id["friendly_name"],
type_var = 'power',
publish_prefix=publish_prefix)
cols_published = cols_published+["P_grid"]
# Publish total value of cost function
custom_cost_fun_id = params['passed_data']['custom_cost_fun_id']
col_cost_fun = [i for i in opt_res_latest.columns if 'cost_fun_' in i]
input_data_dict['rh'].post_data(opt_res_latest[col_cost_fun], idx_closest,
custom_cost_fun_id["entity_id"],
custom_cost_fun_id["unit_of_measurement"],
custom_cost_fun_id["friendly_name"])
custom_cost_fun_id["friendly_name"],
type_var = 'cost_fun',
publish_prefix=publish_prefix)
# Publish unit_load_cost
custom_unit_load_cost_id = params['passed_data']['custom_unit_load_cost_id']
input_data_dict['rh'].post_data(opt_res_latest['unit_load_cost'], idx_closest,
custom_unit_load_cost_id["entity_id"],
custom_unit_load_cost_id["unit_of_measurement"],
custom_unit_load_cost_id["friendly_name"])
custom_unit_load_cost_id["friendly_name"],
type_var = 'unit_load_cost',
publish_prefix=publish_prefix)
cols_published = cols_published+["unit_load_cost"]
# Publish unit_prod_price
custom_unit_prod_price_id = params['passed_data']['custom_unit_prod_price_id']
input_data_dict['rh'].post_data(opt_res_latest['unit_prod_price'], idx_closest,
custom_unit_prod_price_id["entity_id"],
custom_unit_prod_price_id["unit_of_measurement"],
custom_unit_prod_price_id["friendly_name"])
custom_unit_prod_price_id["friendly_name"],
type_var = 'unit_prod_price',
publish_prefix=publish_prefix)
cols_published = cols_published+["unit_prod_price"]
# Create a DF resuming what has been published
opt_res = opt_res_latest[cols_published].loc[[opt_res_latest.index[idx_closest]]]
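Every `post_data` call in `publish_data` now forwards two extra keyword arguments: `type_var`, which tells `RetrieveHass.post_data` how to label the attached forecast attributes, and `publish_prefix`, which is prepended to the published entity IDs. As a side note, the repeated calls could also be driven from a small table; the sketch below is a hypothetical refactoring under that idea (the helper name build_publish_plan and the triples are invented for illustration), not the code of this commit:

# Sketch: a table-driven view of the columns published above, pairing each
# optimization result column with its custom-ID key and its type_var tag.
from typing import List, Tuple

def build_publish_plan(set_use_battery: bool, num_def_loads: int) -> List[Tuple[str, str, str]]:
    """Return (result_column, custom_id_key, type_var) triples to publish."""
    plan = [
        ("P_PV", "custom_pv_forecast_id", "power"),
        ("P_Load", "custom_load_forecast_id", "power"),
    ]
    for k in range(num_def_loads):
        plan.append(("P_deferrable{}".format(k), "custom_deferrable_forecast_id", "deferrable"))
    if set_use_battery:
        plan.append(("P_batt", "custom_batt_forecast_id", "batt"))
        plan.append(("SOC_opt", "custom_batt_soc_forecast_id", "SOC"))
    plan.append(("P_grid", "custom_grid_forecast_id", "power"))
    plan.append(("unit_load_cost", "custom_unit_load_cost_id", "unit_load_cost"))
    plan.append(("unit_prod_price", "custom_unit_prod_price_id", "unit_prod_price"))
    return plan

# Example: a battery-enabled plant with two deferrable loads.
for column, id_key, type_var in build_publish_plan(True, 2):
    print(column, id_key, type_var)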
31 changes: 20 additions & 11 deletions src/emhass/retrieve_hass.py
@@ -242,7 +242,9 @@ def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str,

def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
unit_of_measurement: str, friendly_name: str,
from_mlforecaster: Optional[bool]=False) -> None:
type_var: str,
from_mlforecaster: Optional[bool]=False,
publish_prefix: Optional[str]="") -> None:
r"""
Post passed data to hass.
@@ -258,8 +260,15 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
:type unit_of_measurement: str
:param friendly_name: The friendly name that will be used in the hass frontend.
:type friendly_name: str
:param type_var: A variable to indicate the type of variable: power, SOC, etc.
:type type_var: str
:param publish_prefix: A common prefix for all published data entity_id.
:type publish_prefix: str, optional
"""
# Add a possible prefix to the entity ID
entity_id = entity_id.replace('sensor.', 'sensor.'+publish_prefix)
# Set the URL
if self.hass_url == "http://supervisor/core/api": # If we are using the supervisor API
url = self.hass_url+"/states/"+entity_id
else: # Otherwise the Home Assistant Core API it is
@@ -269,31 +278,31 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
"content-type": "application/json",
}
# Preparing the data dict to be published
if 'cost_fun_' in entity_id:
if type_var == 'cost_fun':
state = np.round(data_df.sum()[0],2)
elif 'unit_' in entity_id:
elif type_var == 'unit_load_cost' or type_var == 'unit_prod_price':
state = np.round(data_df.loc[data_df.index[idx]],4)
else:
state = np.round(data_df.loc[data_df.index[idx]],2)
if 'p_pv' in entity_id or 'p_load' in entity_id or 'p_grid' in entity_id:
if type_var == 'power':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "forecasts", state)
elif 'deferrable' in entity_id:
elif type_var == 'deferrable':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "deferrables_schedule", state)
elif 'batt' in entity_id:
elif type_var == 'batt':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "battery_scheduled_power", state)
elif 'SOC' in entity_id:
elif type_var == 'SOC':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "battery_scheduled_soc", state)
elif 'unit_load_cost' in entity_id:
elif type_var == 'unit_load_cost':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "unit_load_cost_forecasts", state)
elif 'unit_prod_price' in entity_id:
elif type_var == 'unit_prod_price':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "unit_load_cost_forecasts", state)
elif from_mlforecaster:
friendly_name, "unit_prod_price_forecasts", state)
elif type_var == 'mlforecaster':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "scheduled_forecast", state)
else:
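The changes to `post_data` replace the substring checks on `entity_id` with an explicit `type_var` argument and splice `publish_prefix` into the entity ID right after the `sensor.` domain. A small self-contained illustration of both behaviors; the lookup table mirrors the if/elif chain above but is a sketch for reading purposes, not the project's code:

# Sketch: the attribute key selected for each type_var, and the prefixing rule
# applied to entity_id before posting to Home Assistant.
ATTR_KEY_BY_TYPE = {
    "power": "forecasts",
    "deferrable": "deferrables_schedule",
    "batt": "battery_scheduled_power",
    "SOC": "battery_scheduled_soc",
    "unit_load_cost": "unit_load_cost_forecasts",
    "unit_prod_price": "unit_prod_price_forecasts",
    "mlforecaster": "scheduled_forecast",
}

def prefixed_entity_id(entity_id: str, publish_prefix: str = "") -> str:
    """Insert the prefix right after the 'sensor.' domain, as post_data does."""
    return entity_id.replace("sensor.", "sensor." + publish_prefix)

print(prefixed_entity_id("sensor.p_pv_forecast", "emhass_"))  # sensor.emhass_p_pv_forecast
print(ATTR_KEY_BY_TYPE["power"])                              # forecasts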
9 changes: 8 additions & 1 deletion src/emhass/utils.py
@@ -138,7 +138,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
'custom_cost_fun_id': {"entity_id": "sensor.total_cost_fun_value", "unit_of_measurement": "", "friendly_name": "Total cost function value"},
'custom_unit_load_cost_id': {"entity_id": "sensor.unit_load_cost", "unit_of_measurement": "€/kWh", "friendly_name": "Unit Load Cost"},
'custom_unit_prod_price_id': {"entity_id": "sensor.unit_prod_price", "unit_of_measurement": "€/kWh", "friendly_name": "Unit Prod Price"},
'custom_deferrable_forecast_id': custom_deferrable_forecast_id}
'custom_deferrable_forecast_id': custom_deferrable_forecast_id,
'publish_prefix': ""}
if 'passed_data' in params.keys():
for key, value in default_passed_dict.items():
params['passed_data'][key] = value
@@ -344,6 +345,12 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
params['passed_data']['custom_unit_prod_price_id'] = runtimeparams['custom_unit_prod_price_id']
if 'custom_deferrable_forecast_id' in runtimeparams.keys():
params['passed_data']['custom_deferrable_forecast_id'] = runtimeparams['custom_deferrable_forecast_id']
# A condition to put a prefix on all published data
if 'publish_prefix' not in runtimeparams.keys():
publish_prefix = ""
else:
publish_prefix = runtimeparams['publish_prefix']
params['passed_data']['publish_prefix'] = publish_prefix
# Serialize the final params
params = json.dumps(params)
return params, retrieve_hass_conf, optim_conf, plant_conf
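In `treat_runtimeparams`, `publish_prefix` now has an empty-string default in `default_passed_dict` and is overridden when supplied at runtime. A minimal sketch of the same default-or-override behavior, using dict.get instead of the if/else above; the payload is an illustrative example, not taken from the repository:

# Sketch: publish_prefix defaults to "" unless provided in the runtime parameters.
import json

runtimeparams = json.loads('{"publish_prefix": "emhass_", "SOCtarget": 0.4}')

passed_data = {}
passed_data["publish_prefix"] = runtimeparams.get("publish_prefix", "")
print(passed_data)  # {'publish_prefix': 'emhass_'}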
13 changes: 9 additions & 4 deletions tests/test_retrieve_hass.py
@@ -135,7 +135,9 @@ def test_prepare_data_negative_load(self):
self.assertEqual(self.rh.df_final.index.tz, self.retrieve_hass_conf['time_zone'])

def test_publish_data(self):
response, data = self.rh.post_data(self.df_raw[self.df_raw.columns[0]], 25, 'sensor.p_pv_forecast', "Unit", "Variable")
response, data = self.rh.post_data(self.df_raw[self.df_raw.columns[0]],
25, 'sensor.p_pv_forecast', "Unit", "Variable",
type_var = 'power')
self.assertEqual(response.status_code, 200)
self.assertTrue(data['state']==str(np.round(self.df_raw.loc[self.df_raw.index[25],self.df_raw.columns[0]],2)))
self.assertTrue(data['attributes']['unit_of_measurement']=='Unit')
@@ -145,17 +147,20 @@ def test_publish_data(self):
df.columns = ['P_PV', 'P_Load']
df["P_batt"] = 1000.0
df["SOC_opt"] = 0.5
response, data = self.rh.post_data(df["P_PV"], 25, 'sensor.p_pv_forecast', "W", "PV Forecast")
response, data = self.rh.post_data(df["P_PV"], 25, 'sensor.p_pv_forecast', "W", "PV Forecast",
type_var = 'power')
self.assertEqual(response.status_code, 200)
self.assertTrue(data['state']==str(np.round(df.loc[df.index[25],df.columns[0]],2)))
self.assertTrue(data['attributes']['unit_of_measurement']=='W')
self.assertTrue(data['attributes']['friendly_name']=='PV Forecast')
self.assertIsInstance(data['attributes']['forecasts'], list)
response, data = self.rh.post_data(df["P_batt"], 25, 'sensor.p_batt_forecast', "W", "Battery Power Forecast")
response, data = self.rh.post_data(df["P_batt"], 25, 'sensor.p_batt_forecast', "W", "Battery Power Forecast",
type_var = 'batt')
self.assertEqual(response.status_code, 200)
self.assertTrue(data['attributes']['unit_of_measurement']=='W')
self.assertTrue(data['attributes']['friendly_name']=='Battery Power Forecast')
response, data = self.rh.post_data(df["SOC_opt"], 25, 'sensor.SOC_forecast', "%", "Battery SOC Forecast")
response, data = self.rh.post_data(df["SOC_opt"], 25, 'sensor.SOC_forecast', "%", "Battery SOC Forecast",
type_var = 'SOC')
self.assertEqual(response.status_code, 200)
self.assertTrue(data['attributes']['unit_of_measurement']=='%')
self.assertTrue(data['attributes']['friendly_name']=='Battery SOC Forecast')
2 changes: 2 additions & 0 deletions tests/test_utils.py
@@ -118,6 +118,7 @@ def test_treat_runtimeparams(self):
runtimeparams.update({'solcast_rooftop_id':'yourrooftopid'})
runtimeparams.update({'solar_forecast_kwp':5.0})
runtimeparams.update({'SOCtarget':0.4})
runtimeparams.update({'publish_prefix':'emhass_'})
runtimeparams_json = json.dumps(runtimeparams)
retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
pathlib.Path(root+'/config_emhass.yaml'), use_secrets=True, params=self.params_json)
@@ -140,6 +141,7 @@
self.assertTrue(retrieve_hass_conf['solcast_rooftop_id'] == 'yourrooftopid')
self.assertTrue(retrieve_hass_conf['solar_forecast_kwp'] == 5.0)
self.assertTrue(plant_conf['SOCtarget'] == 0.4)
self.assertTrue(params['passed_data']['publish_prefix'] == 'emhass_')

def test_treat_runtimeparams_failed(self):
params = TestCommandLineUtils.get_test_params()
