Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix - Preparing version 0.6.0, see CHANGELOG #137

Merged
merged 3 commits into from
Dec 17, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,23 @@
# Changelog

## [0.6.0] - 2023-12-16
### Improvement
- Now Python 3.11 is fully supported, thanks to @pail23
- We now publish the optimization status on sensor.optim_status
- Bumped setuptools, skforecast, numpy, scipy, pandas
- A good bunch of documentation improvements thanks to @g1za
- Improved code coverage (a little bit ;-)
### Fix
- Some fixes managing time zones, thanks to @pail23
- Bug fix on grid cost function equation, thanks to @michaelpiron
- Applying a first set of fixes proposed by @smurfix:
- Don't ignore HTTP errors
- Handle missing variable correctly
- Slight error message improvement
- Just use the default solver
- Get locations from environment in non-app mode
- Tolerate running directly from source

## [0.5.1] - 2023-10-19
### Improvement
- Improved documentation, thanks to @g1za
Expand Down Expand Up @@ -471,6 +489,9 @@
[0.4.13]: https://github.com/davidusb-geek/emhass/releases/tag/v0.4.13
[0.4.14]: https://github.com/davidusb-geek/emhass/releases/tag/v0.4.14
[0.4.15]: https://github.com/davidusb-geek/emhass/releases/tag/v0.4.15
[0.5.0]: https://github.com/davidusb-geek/emhass/releases/tag/v0.5.0
[0.5.1]: https://github.com/davidusb-geek/emhass/releases/tag/v0.5.1
[0.6.0]: https://github.com/davidusb-geek/emhass/releases/tag/v0.6.0

# Notes
All notable changes to this project will be documented in this file.
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM python:3.8-slim-buster
FROM python:3.9-slim-buster

# switch working directory
WORKDIR /app
Expand Down
2 changes: 1 addition & 1 deletion config_emhass.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ optim_conf:
- prod_price_forecast_method: 'constant' # options are 'constant' for constant fixed value or 'csv' to load custom price forecast from a CSV file
- prod_sell_price: 0.065 # power production selling price in €/kWh (only needed if prod_price_forecast_method='constant')
- set_total_pv_sell: False # consider that all PV power is injected to the grid (self-consumption with total sell)
- lp_solver: 'PULP_CBC_CMD' # set the name of the linear programming solver that will be used
- lp_solver: 'default' # set the name of the linear programming solver that will be used
- lp_solver_path: 'empty' # set the path to the LP solver
- set_nocharge_from_grid: False # avoid battery charging from the grid
- set_nodischarge_to_grid: True # avoid battery discharging to the grid
Expand Down
2 changes: 1 addition & 1 deletion docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
author = 'David HERNANDEZ'

# The full version, including alpha/beta/rc tags
release = '0.5.1'
release = '0.6.0'

# -- General configuration ---------------------------------------------------

Expand Down
6 changes: 3 additions & 3 deletions scripts/load_clustering_stumpy.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
# fig = (px.line(df, x='Clusters', y='Distortions', template=template)).update_traces(mode='lines+markers')
# fig.show()

# The silouhette metod
# The silhouette method
silhouette_scores = []
K = range(2,12)

Expand All @@ -111,7 +111,7 @@
# The clustering
kmeans = KMeans(n_clusters=6, init='k-means++')
kmeans = kmeans.fit(data_lag)
data['cluster_group'] = kmeans.labels_
data_lag['cluster_group'] = kmeans.labels_

fig = px.scatter(data, x='power_load y(t)', y='power_load y(t+1)', color='cluster_group', template=template)
fig = px.scatter(data_lag, x='power_load y(t)', y='power_load y(t+1)', color='cluster_group', template=template)
fig.show()
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

setup(
name='emhass', # Required
version='0.5.1', # Required
version='0.6.0', # Required
description='An Energy Management System for Home Assistant', # Optional
long_description=long_description, # Optional
long_description_content_type='text/markdown', # Optional (see note above)
Expand Down
8 changes: 4 additions & 4 deletions src/emhass/command_line.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,15 +536,15 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
custom_cost_fun_id["friendly_name"],
type_var = 'cost_fun',
publish_prefix = publish_prefix)
# Publish the optimization status (A work in progress, will be available on future release)
'''
# Publish the optimization status
custom_cost_fun_id = params['passed_data']['custom_optim_status_id']
input_data_dict['rh'].post_data(input_data_dict['opt'].optim_status, idx_closest,
input_data_dict['rh'].post_data(opt_res_latest['optim_status'], idx_closest,
custom_cost_fun_id["entity_id"],
custom_cost_fun_id["unit_of_measurement"],
custom_cost_fun_id["friendly_name"],
type_var = 'optim_status',
publish_prefix = publish_prefix)'''
publish_prefix = publish_prefix)
cols_published = cols_published+["optim_status"]
# Publish unit_load_cost
custom_unit_load_cost_id = params['passed_data']['custom_unit_load_cost_id']
input_data_dict['rh'].post_data(opt_res_latest['unit_load_cost'], idx_closest,
Expand Down
3 changes: 2 additions & 1 deletion src/emhass/forecast.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,8 @@ def get_weather_forecast(self, method: Optional[str] = 'scrapper',
# Define index
data.set_index('ts', inplace=True)
else:
self.logger.error("Passed method is not valid")
self.logger.error("Method %r is not valid", method)
data = None
return data

def cloud_cover_to_irradiance(self, cloud_cover: pd.Series,
Expand Down
28 changes: 15 additions & 13 deletions src/emhass/optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,25 +387,24 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n

## Finally, we call the solver to solve our optimization model:
# solving with default solver CBC
try:
if self.lp_solver == 'PULP_CBC_CMD':
opt_model.solve(PULP_CBC_CMD(msg=0))
elif self.lp_solver == 'GLPK_CMD':
opt_model.solve(GLPK_CMD(msg=0))
elif self.lp_solver == 'COIN_CMD':
opt_model.solve(COIN_CMD(msg=0, path=self.lp_solver_path))
else:
self.logger.error("Invalid solver name passed")
except Exception:
self.logger.error("It was not possible to find a valid solver for Pulp package")
if self.lp_solver == 'PULP_CBC_CMD':
opt_model.solve(PULP_CBC_CMD(msg=0))
elif self.lp_solver == 'GLPK_CMD':
opt_model.solve(GLPK_CMD(msg=0))
elif self.lp_solver == 'COIN_CMD':
opt_model.solve(COIN_CMD(msg=0, path=self.lp_solver_path))
else:
self.logger.warning("Solver %s unknown, using default", self.lp_solver)
opt_model.solve()

# The status of the solution is printed to the screen
self.optim_status = plp.LpStatus[opt_model.status]
self.logger.info("Status: " + self.optim_status)
if plp.value(opt_model.objective) is None:
self.logger.warning("Cost function cannot be evaluated, probably None")
self.logger.warning("Cost function cannot be evaluated")
return
else:
self.logger.info("Total value of the Cost function = " + str(round(plp.value(opt_model.objective),2)))
self.logger.info("Total value of the Cost function = %.02f", plp.value(opt_model.objective))

# Build results Dataframe
opt_tp = pd.DataFrame()
Expand Down Expand Up @@ -462,6 +461,9 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n
unit_prod_price[i]*P_grid_neg[i].varValue) for i in set_I]
else:
self.logger.error("The cost function specified type is not valid")

# Add the optimization status
opt_tp["optim_status"] = self.optim_status

return opt_tp

Expand Down
28 changes: 19 additions & 9 deletions src/emhass/retrieve_hass.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,9 @@ def get_data(self, days_list: pd.date_range, var_list: list, minimal_response: O
response = get(url, headers=headers)
except Exception:
return "Request Get Error"
else:
if response.status_code > 299:
return f"Request Get Error: {response.status_code}"
'''import bz2 # Uncomment to save a serialized data for tests
import _pickle as cPickle
with bz2.BZ2File("data/test_response_get_data_get_method.pbz2", "w") as f:
Expand Down Expand Up @@ -174,17 +177,14 @@ def prepare_data(self, var_load: str, load_negative: Optional[bool] = False, set
:rtype: pandas.DataFrame

"""
if load_negative: # Apply the correct sign to load power
try:
try:
if load_negative: # Apply the correct sign to load power
self.df_final[var_load+'_positive'] = -self.df_final[var_load]
except KeyError:
self.logger.error("Variable "+var_load+" was not found. This is typically because no data could be retrieved from Home Assistant")
else:
try:
else:
self.df_final[var_load+'_positive'] = self.df_final[var_load]
except KeyError:
self.logger.error("Variable "+var_load+" was not found. This is typically because no data could be retrieved from Home Assistant")
self.df_final.drop([var_load], inplace=True, axis=1)
self.df_final.drop([var_load], inplace=True, axis=1)
except KeyError:
self.logger.error("Variable "+var_load+" was not found. This is typically because no data could be retrieved from Home Assistant")
if set_zero_min: # Apply minimum values
self.df_final.clip(lower=0.0, inplace=True, axis=1)
self.df_final.replace(to_replace=0.0, value=np.nan, inplace=True)
Expand Down Expand Up @@ -282,6 +282,8 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
state = np.round(data_df.sum()[0],2)
elif type_var == 'unit_load_cost' or type_var == 'unit_prod_price':
state = np.round(data_df.loc[data_df.index[idx]],4)
elif type_var == 'optim_status':
state = data_df.loc[data_df.index[idx]]
else:
state = np.round(data_df.loc[data_df.index[idx]],2)
if type_var == 'power':
Expand All @@ -305,6 +307,14 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
elif type_var == 'mlforecaster':
data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
friendly_name, "scheduled_forecast", state)
elif type_var == 'optim_status':
data = {
"state": state,
"attributes": {
"unit_of_measurement": unit_of_measurement,
"friendly_name": friendly_name
}
}
else:
data = {
"state": "{:.2f}".format(state),
Expand Down
3 changes: 3 additions & 0 deletions src/emhass/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
'custom_batt_soc_forecast_id': {"entity_id": "sensor.soc_batt_forecast", "unit_of_measurement": "%", "friendly_name": "Battery SOC Forecast"},
'custom_grid_forecast_id': {"entity_id": "sensor.p_grid_forecast", "unit_of_measurement": "W", "friendly_name": "Grid Power Forecast"},
'custom_cost_fun_id': {"entity_id": "sensor.total_cost_fun_value", "unit_of_measurement": "", "friendly_name": "Total cost function value"},
'custom_optim_status_id': {"entity_id": "sensor.optim_status", "unit_of_measurement": "", "friendly_name": "EMHASS optimization status"},
'custom_unit_load_cost_id': {"entity_id": "sensor.unit_load_cost", "unit_of_measurement": "€/kWh", "friendly_name": "Unit Load Cost"},
'custom_unit_prod_price_id': {"entity_id": "sensor.unit_prod_price", "unit_of_measurement": "€/kWh", "friendly_name": "Unit Prod Price"},
'custom_deferrable_forecast_id': custom_deferrable_forecast_id,
Expand Down Expand Up @@ -339,6 +340,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic
params['passed_data']['custom_grid_forecast_id'] = runtimeparams['custom_grid_forecast_id']
if 'custom_cost_fun_id' in runtimeparams.keys():
params['passed_data']['custom_cost_fun_id'] = runtimeparams['custom_cost_fun_id']
if 'custom_optim_status_id' in runtimeparams.keys():
params['passed_data']['custom_optim_status_id'] = runtimeparams['custom_optim_status_id']
if 'custom_unit_load_cost_id' in runtimeparams.keys():
params['passed_data']['custom_unit_load_cost_id'] = runtimeparams['custom_unit_load_cost_id']
if 'custom_unit_prod_price_id' in runtimeparams.keys():
Expand Down
13 changes: 8 additions & 5 deletions src/emhass/web_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from jinja2 import Environment, PackageLoader
from requests import get
from waitress import serve
from importlib.metadata import version
from importlib.metadata import version, PackageNotFoundError
from pathlib import Path
import os, json, argparse, pickle, yaml, logging
from distutils.util import strtobool
Expand Down Expand Up @@ -265,9 +265,9 @@ def action_call(action_name):
app.logger.error("options.json does not exists")
DATA_PATH = "/share/" #"/data/"
else:
CONFIG_PATH = "/app/config_emhass.yaml"
CONFIG_PATH = os.getenv("CONFIG_PATH", default="/app/config_emhass.yaml")
options = None
DATA_PATH = "/app/data/"
DATA_PATH = os.getenv("DATA_PATH", default="/app/data/")
config_path = Path(CONFIG_PATH)
data_path = Path(DATA_PATH)

Expand Down Expand Up @@ -328,7 +328,7 @@ def action_call(action_name):
else:
costfun = os.getenv('LOCAL_COSTFUN', default='profit')
logging_level = os.getenv('LOGGING_LEVEL', default='INFO')
with open('/app/secrets_emhass.yaml', 'r') as file:
with open(os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), 'r') as file:
params_secrets = yaml.load(file, Loader=yaml.FullLoader)
hass_url = params_secrets['hass_url']

Expand Down Expand Up @@ -364,5 +364,8 @@ def action_call(action_name):
app.logger.info("Launching the emhass webserver at: http://"+web_ui_url+":"+str(port))
app.logger.info("Home Assistant data fetch will be performed using url: "+hass_url)
app.logger.info("The data path is: "+str(data_path))
app.logger.info("Using core emhass version: "+version('emhass'))
try:
app.logger.info("Using core emhass version: "+version('emhass'))
except PackageNotFoundError:
app.logger.info("Using development emhass version")
serve(app, host=web_ui_url, port=port, threads=8)
14 changes: 14 additions & 0 deletions tests/test_command_line_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,20 @@ def test_naive_mpc_optim(self):
action, logger, get_data_from_file=True)
opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res)
self.assertTrue(len(opt_res_last)==1)
# Check if status is published
from datetime import datetime
now_precise = datetime.now(input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0)
idx_closest = opt_res.index.get_indexer([now_precise], method='nearest')[0]
custom_cost_fun_id = {"entity_id": "sensor.optim_status", "unit_of_measurement": "", "friendly_name": "EMHASS optimization status"}
publish_prefix = ""
response, data = input_data_dict['rh'].post_data(opt_res['optim_status'], idx_closest,
custom_cost_fun_id["entity_id"],
custom_cost_fun_id["unit_of_measurement"],
custom_cost_fun_id["friendly_name"],
type_var = 'optim_status',
publish_prefix = publish_prefix)
self.assertTrue(hasattr(response, '__class__'))
self.assertTrue(data['attributes']['friendly_name'] == 'EMHASS optimization status')

def test_forecast_model_fit_predict_tune(self):
config_path = pathlib.Path(root+'/config_emhass.yaml')
Expand Down
2 changes: 2 additions & 0 deletions tests/test_forecast.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,8 @@ def test_get_weather_forecast_csv(self):
self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone)
self.assertEqual(len(self.df_weather_csv), len(P_PV_forecast))
df_weather_none = self.fcst.get_weather_forecast(method='none')
self.assertTrue(df_weather_none == None)

def test_get_weather_forecast_mlforecaster(self):
pass
Expand Down
21 changes: 16 additions & 5 deletions tests/test_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,6 @@ def test_perform_dayahead_forecast_optim(self):
self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns)
self.assertTrue(self.opt_res_dayahead['P_deferrable0'].sum()*(
self.retrieve_hass_conf['freq'].seconds/3600) == self.optim_conf['P_deferrable_nom'][0]*self.optim_conf['def_total_hours'][0])
# Testing estimation of the current index
now_precise = datetime.now(self.input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0)
idx_closest = self.opt_res_dayahead.index.get_indexer([now_precise], method='ffill')[0]
idx_closest = self.opt_res_dayahead.index.get_indexer([now_precise], method='nearest')[0]
# Test the battery, dynamics and grid exchange constraints
self.optim_conf.update({'set_use_battery': True})
self.optim_conf.update({'set_nocharge_from_grid': True})
Expand All @@ -103,6 +99,8 @@ def test_perform_dayahead_forecast_optim(self):
table = opt_res[cost_cols].reset_index().sum(numeric_only=True).to_frame(name='Cost Totals').reset_index()
self.assertTrue(table.columns[0]=='index')
self.assertTrue(table.columns[1]=='Cost Totals')
# Check status
self.assertTrue('optim_status' in self.opt_res_dayahead.columns)

def test_perform_dayahead_forecast_optim_costfun_selfconso(self):
costfun = 'self-consumption'
Expand Down Expand Up @@ -136,7 +134,6 @@ def test_perform_dayahead_forecast_optim_aux(self):
self.optim_conf['treat_def_as_semi_cont'] = [False, False]
self.optim_conf['set_total_pv_sell'] = True
self.optim_conf['set_def_constant'] = [True, True]
# self.optim_conf['lp_solver'] = 'GLPK_CMD'
self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
self.fcst.var_load_cost, self.fcst.var_prod_price,
self.costfun, root, logger)
Expand All @@ -147,6 +144,20 @@ def test_perform_dayahead_forecast_optim_aux(self):
self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex)
self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
import pulp as pl
solver_list = pl.listSolvers(onlyAvailable=True)
for solver in solver_list:
self.optim_conf['lp_solver'] = solver
self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
self.fcst.var_load_cost, self.fcst.var_prod_price,
self.costfun, root, logger)
self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead)
self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead)
self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim(
self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast)
self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex)
self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)

def test_perform_naive_mpc_optim(self):
self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead)
Expand Down
Loading
Loading