diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 41c3b13e..d26cba5a 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -32,10 +32,22 @@ // Configure properties specific to VS Code. "vscode": { // Add the IDs of extensions you want installed when the container is created. - "extensions": ["ms-python.debugpy", "ms-python.python"] + "extensions": ["ms-python.debugpy", "ms-python.python","charliermarsh.ruff"], + "settings": { + "[python]": { + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "explicit", + "source.organizeImports": "explicit" + }, + "editor.defaultFormatter": "charliermarsh.ruff" + } + } } }, + + "postCreateCommand": ["pip3", "install", "requests-mock", "--break-system-packages"] } diff --git a/scripts/load_clustering.py b/scripts/load_clustering.py index f627d8f1..53771a3a 100644 --- a/scripts/load_clustering.py +++ b/scripts/load_clustering.py @@ -1,98 +1,104 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import numpy as np +import copy +import pathlib +import pickle + import pandas as pd -import pathlib, pickle, copy, time import plotly.express as px import plotly.io as pio -pio.renderers.default = 'browser' -pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_secrets, build_params +pio.renderers.default = "browser" +pd.options.plotting.backend = "plotly" from sklearn.cluster import KMeans -from sklearn.linear_model import LinearRegression -from sklearn.linear_model import ElasticNet -from sklearn.neighbors import KNeighborsRegressor -from sklearn.metrics import r2_score, silhouette_score - -from skforecast.ForecasterAutoreg import ForecasterAutoreg -from skforecast.model_selection import bayesian_search_forecaster -from skforecast.model_selection import backtesting_forecaster -from skforecast.utils import save_forecaster -from skforecast.utils import load_forecaster -# from skopt.space import Categorical, Real, Integer +from sklearn.metrics import silhouette_score +# from skopt.space import Categorical, Real, Integer from tslearn.clustering import TimeSeriesKMeans -from tslearn.datasets import CachedDatasets -from tslearn.preprocessing import TimeSeriesScalerMeanVariance, \ - TimeSeriesResampler +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_params, + build_secrets, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=True) -if __name__ == '__main__': - +if __name__ == "__main__": days_to_retrieve = 240 model_type = "load_clustering" var_model = "sensor.power_load_positive" # Build 
params with no config and default secrets - data_path = emhass_conf['data_path'] / str('data_train_'+model_type+'.pkl') - _,secrets = build_secrets(emhass_conf,logger,no_response=True) - params = build_params(emhass_conf,secrets,{},logger) - template = 'presentation' + data_path = emhass_conf["data_path"] / str("data_train_" + model_type + ".pkl") + _, secrets = build_secrets(emhass_conf, logger, no_response=True) + params = build_params(emhass_conf, secrets, {}, logger) + template = "presentation" if data_path.is_file(): logger.info("Loading a previous data file") with open(data_path, "rb") as fid: data, var_model = pickle.load(fid) else: - logger.info("Using EMHASS methods to retrieve the new forecast model train data") - retrieve_hass_conf, _, _ = get_yaml_parse(params,logger) - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger, get_data_from_file=False) + logger.info( + "Using EMHASS methods to retrieve the new forecast model train data" + ) + retrieve_hass_conf, _, _ = get_yaml_parse(params, logger) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + get_data_from_file=False, + ) days_list = get_days_list(days_to_retrieve) var_list = [var_model] rh.get_data(days_list, var_list) - - with open(data_path, 'wb') as fid: + + with open(data_path, "wb") as fid: pickle.dump((rh.df_final, var_model), fid, pickle.HIGHEST_PROTOCOL) data = copy.deepcopy(rh.df_final) - + logger.info(data.describe()) - + # Plot the input data fig = data.plot() fig.layout.template = template - fig.update_yaxes(title_text = "Power (W)") - fig.update_xaxes(title_text = "Time") + fig.update_yaxes(title_text="Power (W)") + fig.update_xaxes(title_text="Time") fig.show() - + data_lag = pd.concat([data, data.shift()], axis=1) - data_lag.columns = ['power_load y(t)', 'power_load y(t+1)'] + data_lag.columns = ["power_load y(t)", "power_load y(t+1)"] data_lag = data_lag.dropna() - - fig2 = data_lag.plot.scatter(x='power_load y(t)', y='power_load y(t+1)', c='DarkBlue') + + fig2 = data_lag.plot.scatter( + x="power_load y(t)", y="power_load y(t+1)", c="DarkBlue" + ) fig2.layout.template = template fig2.show() - + # Elbow method to check how many clusters # distortions = [] # K = range(1,12) @@ -101,51 +107,66 @@ # kmeans = KMeans(n_clusters=cluster_size, init='k-means++') # kmeans = kmeans.fit(data_lag) # distortions.append(kmeans.inertia_) - + # df = pd.DataFrame({'Clusters': K, 'Distortions': distortions}) # fig = (px.line(df, x='Clusters', y='Distortions', template=template)).update_traces(mode='lines+markers') # fig.show() - + # The silouhette method silhouette_scores = [] - K = range(2,12) + K = range(2, 12) for cluster_size in K: - kmeans = KMeans(n_clusters=cluster_size, init='k-means++', random_state=200) + kmeans = KMeans(n_clusters=cluster_size, init="k-means++", random_state=200) labels = kmeans.fit(data_lag).labels_ - silhouette_score_tmp = silhouette_score(data_lag, labels, metric='euclidean', - sample_size=1000, random_state=200) + silhouette_score_tmp = silhouette_score( + data_lag, labels, metric="euclidean", sample_size=1000, random_state=200 + ) silhouette_scores.append(silhouette_score_tmp) - df = pd.DataFrame({'Clusters': K, 'Silhouette Score': silhouette_scores}) - fig = (px.line(df, x='Clusters', 
y='Silhouette Score', template=template)).update_traces(mode='lines+markers') + df = pd.DataFrame({"Clusters": K, "Silhouette Score": silhouette_scores}) + fig = ( + px.line(df, x="Clusters", y="Silhouette Score", template=template) + ).update_traces(mode="lines+markers") fig.show() - + # The clustering - kmeans = KMeans(n_clusters=6, init='k-means++') + kmeans = KMeans(n_clusters=6, init="k-means++") kmeans = kmeans.fit(data_lag) - data_lag['cluster_group'] = kmeans.labels_ - - fig = px.scatter(data_lag, x='power_load y(t)', y='power_load y(t+1)', color='cluster_group', template=template) + data_lag["cluster_group"] = kmeans.labels_ + + fig = px.scatter( + data_lag, + x="power_load y(t)", + y="power_load y(t+1)", + color="cluster_group", + template=template, + ) fig.show() - + km = TimeSeriesKMeans(n_clusters=6, verbose=True, random_state=200) y_pred = km.fit_predict(data_lag) - data_lag['cluster_group_tslearn_euclidean'] = y_pred - - fig = px.scatter(data_lag, x='power_load y(t)', y='power_load y(t+1)', color='cluster_group_tslearn_euclidean', template=template) + data_lag["cluster_group_tslearn_euclidean"] = y_pred + + fig = px.scatter( + data_lag, + x="power_load y(t)", + y="power_load y(t+1)", + color="cluster_group_tslearn_euclidean", + template=template, + ) fig.show() - + # dba_km = TimeSeriesKMeans(n_clusters=6, n_init=2, metric="dtw", verbose=True, max_iter_barycenter=10, random_state=200) # y_pred = dba_km.fit_predict(data_lag) # data_lag['cluster_group_tslearn_dba'] = y_pred - + # fig = px.scatter(data_lag, x='power_load y(t)', y='power_load y(t+1)', color='cluster_group_tslearn_dba', template=template) # fig.show() - + # sdtw_km = TimeSeriesKMeans(n_clusters=6, metric="softdtw", metric_params={"gamma": .01}, verbose=True, random_state=200) # y_pred = sdtw_km.fit_predict(data_lag) # data_lag['cluster_group_tslearn_sdtw'] = y_pred - + # fig = px.scatter(data_lag, x='power_load y(t)', y='power_load y(t+1)', color='cluster_group_tslearn_sdtw', template=template) - # fig.show() \ No newline at end of file + # fig.show() diff --git a/scripts/load_forecast_sklearn.py b/scripts/load_forecast_sklearn.py index 8e09215d..db931675 100644 --- a/scripts/load_forecast_sklearn.py +++ b/scripts/load_forecast_sklearn.py @@ -1,282 +1,369 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- +import copy +import pathlib +import pickle +import time + import numpy as np import pandas as pd -import pathlib, pickle, copy, time import plotly.io as pio -pio.renderers.default = 'browser' -pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_secrets, build_params +pio.renderers.default = "browser" +pd.options.plotting.backend = "plotly" -from sklearn.linear_model import LinearRegression -from sklearn.linear_model import ElasticNet -from sklearn.neighbors import KNeighborsRegressor +from skforecast.ForecasterAutoreg import ForecasterAutoreg +from skforecast.model_selection import ( + backtesting_forecaster, + bayesian_search_forecaster, +) +from skforecast.utils import load_forecaster, save_forecaster +from sklearn.linear_model import ElasticNet, LinearRegression from sklearn.metrics import r2_score +from sklearn.neighbors import KNeighborsRegressor + +from emhass.forecast import Forecast +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_params, + build_secrets, + get_days_list, + get_logger, + get_root, + get_yaml_parse, 
+) -from skforecast.ForecasterAutoreg import ForecasterAutoreg -from skforecast.model_selection import bayesian_search_forecaster -from skforecast.model_selection import backtesting_forecaster -from skforecast.utils import save_forecaster -from skforecast.utils import load_forecaster # from skopt.space import Categorical, Real, Integer # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['docs_path'] = root / 'docs/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["docs_path"] = root / "docs/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=True) + def add_date_features(data): df = copy.deepcopy(data) - df['year'] = [i.year for i in df.index] - df['month'] = [i.month for i in df.index] - df['day_of_week'] = [i.dayofweek for i in df.index] - df['day_of_year'] = [i.dayofyear for i in df.index] - df['day'] = [i.day for i in df.index] - df['hour'] = [i.hour for i in df.index] + df["year"] = [i.year for i in df.index] + df["month"] = [i.month for i in df.index] + df["day_of_week"] = [i.dayofweek for i in df.index] + df["day_of_year"] = [i.dayofyear for i in df.index] + df["day"] = [i.day for i in df.index] + df["hour"] = [i.hour for i in df.index] return df + def neg_r2_score(y_true, y_pred): return -r2_score(y_true, y_pred) -if __name__ == '__main__': +if __name__ == "__main__": days_to_retrieve = 240 model_type = "load_forecast" var_model = "sensor.power_load_no_var_loads" sklearn_model = "KNeighborsRegressor" num_lags = 48 - + # Build params with no config and default secrets - data_path = emhass_conf['data_path'] / str('data_train_'+model_type+'.pkl') - _,secrets = build_secrets(emhass_conf,logger,no_response=True) - params = build_params(emhass_conf,secrets,{},logger) - template = 'presentation' + data_path = emhass_conf["data_path"] / str("data_train_" + model_type + ".pkl") + _, secrets = build_secrets(emhass_conf, logger, no_response=True) + params = build_params(emhass_conf, secrets, {}, logger) + template = "presentation" if data_path.is_file(): logger.info("Loading a previous data file") with open(data_path, "rb") as fid: data, var_model = pickle.load(fid) else: - logger.info("Using EMHASS methods to retrieve the new forecast model train data") - retrieve_hass_conf, _, _ = get_yaml_parse(params,logger) - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger, get_data_from_file=False) + logger.info( + "Using EMHASS methods to retrieve the new forecast model train data" + ) + retrieve_hass_conf, _, _ = get_yaml_parse(params, logger) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + get_data_from_file=False, + 
) days_list = get_days_list(days_to_retrieve) var_list = [var_model] rh.get_data(days_list, var_list) - - with open(data_path, 'wb') as fid: + + with open(data_path, "wb") as fid: pickle.dump((rh.df_final, var_model), fid, pickle.HIGHEST_PROTOCOL) data = copy.deepcopy(rh.df_final) - + logger.info(data.describe()) fig = data.plot() fig.layout.template = template - fig.update_yaxes(title_text = "Power (W)") - fig.update_xaxes(title_text = "Time") + fig.update_yaxes(title_text="Power (W)") + fig.update_xaxes(title_text="Time") fig.show() - fig.write_image(emhass_conf['docs_path'] / "images/inputs_power_load_forecast.svg", width=1080, height=0.8*1080) + fig.write_image( + emhass_conf["docs_path"] / "images/inputs_power_load_forecast.svg", + width=1080, + height=0.8 * 1080, + ) data.index = pd.to_datetime(data.index) data.sort_index(inplace=True) - data = data[~data.index.duplicated(keep='first')] - + data = data[~data.index.duplicated(keep="first")] + data_exo = pd.DataFrame(index=data.index) data_exo = add_date_features(data_exo) data_exo[var_model] = data[var_model] - data_exo = data_exo.interpolate(method='linear', axis=0, limit=None) - - date_train = data_exo.index[-1]-pd.Timedelta('15days')+data_exo.index.freq # The last 15 days - date_split = data_exo.index[-1]-pd.Timedelta('48h')+data_exo.index.freq # The last 48h - data_train = data_exo.loc[:date_split,:] - data_test = data_exo.loc[date_split:,:] + data_exo = data_exo.interpolate(method="linear", axis=0, limit=None) + + date_train = ( + data_exo.index[-1] - pd.Timedelta("15days") + data_exo.index.freq + ) # The last 15 days + date_split = ( + data_exo.index[-1] - pd.Timedelta("48h") + data_exo.index.freq + ) # The last 48h + data_train = data_exo.loc[:date_split, :] + data_test = data_exo.loc[date_split:, :] steps = len(data_test) - - if sklearn_model == 'LinearRegression': + + if sklearn_model == "LinearRegression": base_model = LinearRegression() - elif sklearn_model == 'ElasticNet': + elif sklearn_model == "ElasticNet": base_model = ElasticNet() - elif sklearn_model == 'KNeighborsRegressor': + elif sklearn_model == "KNeighborsRegressor": base_model = KNeighborsRegressor() else: - logger.error("Passed sklearn model "+sklearn_model+" is not valid") - - forecaster = ForecasterAutoreg( - regressor = base_model, - lags = num_lags - ) + logger.error("Passed sklearn model " + sklearn_model + " is not valid") + + forecaster = ForecasterAutoreg(regressor=base_model, lags=num_lags) logger.info("Training a KNN regressor") start_time = time.time() - forecaster.fit(y=data_train[var_model], - exog=data_train.drop(var_model, axis=1)) + forecaster.fit(y=data_train[var_model], exog=data_train.drop(var_model, axis=1)) logger.info(f"Elapsed time: {time.time() - start_time}") - + # Predictions - predictions = forecaster.predict(steps=steps, exog=data_train.drop(var_model, axis=1)) - pred_metric = r2_score(data_test[var_model],predictions) + predictions = forecaster.predict( + steps=steps, exog=data_train.drop(var_model, axis=1) + ) + pred_metric = r2_score(data_test[var_model], predictions) logger.info(f"Prediction R2 score: {pred_metric}") - + # Plot - df = pd.DataFrame(index=data_exo.index,columns=['train','test','pred']) - df['train'] = data_train[var_model] - df['test'] = data_test[var_model] - df['pred'] = predictions + df = pd.DataFrame(index=data_exo.index, columns=["train", "test", "pred"]) + df["train"] = data_train[var_model] + df["test"] = data_test[var_model] + df["pred"] = predictions fig = df.plot() fig.layout.template = template - 
fig.update_yaxes(title_text = "Power (W)") - fig.update_xaxes(title_text = "Time") - fig.update_xaxes(range=[date_train+pd.Timedelta('10days'), data_exo.index[-1]]) + fig.update_yaxes(title_text="Power (W)") + fig.update_xaxes(title_text="Time") + fig.update_xaxes(range=[date_train + pd.Timedelta("10days"), data_exo.index[-1]]) fig.show() - fig.write_image(emhass_conf['docs_path'] / "images/load_forecast_knn_bare.svg", width=1080, height=0.8*1080) - + fig.write_image( + emhass_conf["docs_path"] / "images/load_forecast_knn_bare.svg", + width=1080, + height=0.8 * 1080, + ) + logger.info("Simple backtesting") start_time = time.time() metric, predictions_backtest = backtesting_forecaster( - forecaster = forecaster, - y = data_train[var_model], - exog = data_train.drop(var_model, axis=1), - initial_train_size = None, - fixed_train_size = False, - steps = num_lags, - metric = neg_r2_score, - refit = False, - verbose = False + forecaster=forecaster, + y=data_train[var_model], + exog=data_train.drop(var_model, axis=1), + initial_train_size=None, + fixed_train_size=False, + steps=num_lags, + metric=neg_r2_score, + refit=False, + verbose=False, ) logger.info(f"Elapsed time: {time.time() - start_time}") logger.info(f"Backtest R2 score: {-metric}") - - df = pd.DataFrame(index=data_exo.index,columns=['train','pred']) - df['train'] = data_exo[var_model] - df['pred'] = predictions_backtest + + df = pd.DataFrame(index=data_exo.index, columns=["train", "pred"]) + df["train"] = data_exo[var_model] + df["pred"] = predictions_backtest fig = df.plot() fig.layout.template = template - fig.update_yaxes(title_text = "Power (W)") - fig.update_xaxes(title_text = "Time") + fig.update_yaxes(title_text="Power (W)") + fig.update_xaxes(title_text="Time") fig.show() - fig.write_image(emhass_conf['docs_path'] / "images/load_forecast_knn_bare_backtest.svg", width=1080, height=0.8*1080) - + fig.write_image( + emhass_conf["docs_path"] / "images/load_forecast_knn_bare_backtest.svg", + width=1080, + height=0.8 * 1080, + ) + # Bayesian search hyperparameter and lags with Skopt - + # Lags used as predictors lags_grid = [6, 12, 24, 36, 48, 60, 72] # Regressor hyperparameters search space - search_space = {'n_neighbors': Integer(2, 20, "uniform", name='n_neighbors'), - 'leaf_size': Integer(20, 40, "log-uniform", name='leaf_size'), - 'weights': Categorical(['uniform', 'distance'], name='weights') - } + search_space = { + "n_neighbors": Integer(2, 20, "uniform", name="n_neighbors"), + "leaf_size": Integer(20, 40, "log-uniform", name="leaf_size"), + "weights": Categorical(["uniform", "distance"], name="weights"), + } logger.info("Backtesting and bayesian hyperparameter optimization") start_time = time.time() results, optimize_results_object = bayesian_search_forecaster( - forecaster = forecaster, - y = data_train[var_model], - exog = data_train.drop(var_model, axis=1), - lags_grid = lags_grid, - search_space = search_space, - steps = num_lags, - metric = neg_r2_score, - refit = True, - initial_train_size = len(data_exo.loc[:date_train]), - fixed_train_size = True, - n_trials = 10, - random_state = 123, - return_best = True, - verbose = False, - engine = 'skopt', - kwargs_gp_minimize = {} + forecaster=forecaster, + y=data_train[var_model], + exog=data_train.drop(var_model, axis=1), + lags_grid=lags_grid, + search_space=search_space, + steps=num_lags, + metric=neg_r2_score, + refit=True, + initial_train_size=len(data_exo.loc[:date_train]), + fixed_train_size=True, + n_trials=10, + random_state=123, + return_best=True, + verbose=False, 
+ engine="skopt", + kwargs_gp_minimize={}, ) logger.info(f"Elapsed time: {time.time() - start_time}") logger.info(results) logger.info(optimize_results_object) - - save_forecaster(forecaster, file_name='forecaster.py', verbose=False) - - forecaster_loaded = load_forecaster('forecaster.py', verbose=False) - predictions_loaded = forecaster.predict(steps=steps, exog=data_train.drop(var_model, axis=1)) - - df = pd.DataFrame(index=data_exo.index,columns=['train','test','pred','pred_naive','pred_optim']) - freq_hours = df.index.freq.delta.seconds/3600 - lags_opt = int(np.round(len(results.iloc[0]['lags']))) - days_needed = int(np.round(lags_opt*freq_hours/24)) - shift = int(24/freq_hours) + + save_forecaster(forecaster, file_name="forecaster.py", verbose=False) + + forecaster_loaded = load_forecaster("forecaster.py", verbose=False) + predictions_loaded = forecaster.predict( + steps=steps, exog=data_train.drop(var_model, axis=1) + ) + + df = pd.DataFrame( + index=data_exo.index, + columns=["train", "test", "pred", "pred_naive", "pred_optim"], + ) + freq_hours = df.index.freq.delta.seconds / 3600 + lags_opt = int(np.round(len(results.iloc[0]["lags"]))) + days_needed = int(np.round(lags_opt * freq_hours / 24)) + shift = int(24 / freq_hours) P_load_forecast_naive = pd.concat([data_exo.iloc[-shift:], data_exo.iloc[:-shift]]) - df['train'] = data_train[var_model] - df['test'] = data_test[var_model] - df['pred'] = predictions - df['pred_naive'] = P_load_forecast_naive[var_model].values - df['pred_optim'] = predictions_loaded + df["train"] = data_train[var_model] + df["test"] = data_test[var_model] + df["pred"] = predictions + df["pred_naive"] = P_load_forecast_naive[var_model].values + df["pred_optim"] = predictions_loaded fig = df.plot() fig.layout.template = template - fig.update_yaxes(title_text = "Power (W)") - fig.update_xaxes(title_text = "Time") - fig.update_xaxes(range=[date_train+pd.Timedelta('10days'), data_exo.index[-1]]) + fig.update_yaxes(title_text="Power (W)") + fig.update_xaxes(title_text="Time") + fig.update_xaxes(range=[date_train + pd.Timedelta("10days"), data_exo.index[-1]]) fig.show() - fig.write_image(emhass_conf['docs_path'] / "images/load_forecast_knn_optimized.svg", width=1080, height=0.8*1080) - - logger.info("######################## Train/Test R2 score comparison ######################## ") - pred_naive_metric_train = r2_score(df.loc[data_train.index,'train'],df.loc[data_train.index,'pred_naive']) - logger.info(f"R2 score for naive prediction in train period (backtest): {pred_naive_metric_train}") - pred_optim_metric_train = -results.iloc[0]['neg_r2_score'] - logger.info(f"R2 score for optimized prediction in train period: {pred_optim_metric_train}") - - pred_metric_test = r2_score(df.loc[data_test.index[1:-1],'test'],df.loc[data_test[1:-1].index,'pred']) - logger.info(f"R2 score for non-optimized prediction in test period: {pred_metric_test}") - pred_naive_metric_test = r2_score(df.loc[data_test.index[1:-1],'test'],df.loc[data_test[1:-1].index,'pred_naive']) - logger.info(f"R2 score for naive persistance forecast in test period: {pred_naive_metric_test}") - pred_optim_metric_test = r2_score(df.loc[data_test.index[1:-1],'test'],df.loc[data_test[1:-1].index,'pred_optim']) - logger.info(f"R2 score for optimized prediction in test period: {pred_optim_metric_test}") - logger.info("################################################################################ ") - - logger.info("Number of optimal lags obtained: "+str(lags_opt)) + fig.write_image( + emhass_conf["docs_path"] / 
"images/load_forecast_knn_optimized.svg", + width=1080, + height=0.8 * 1080, + ) + + logger.info( + "######################## Train/Test R2 score comparison ######################## " + ) + pred_naive_metric_train = r2_score( + df.loc[data_train.index, "train"], df.loc[data_train.index, "pred_naive"] + ) + logger.info( + f"R2 score for naive prediction in train period (backtest): {pred_naive_metric_train}" + ) + pred_optim_metric_train = -results.iloc[0]["neg_r2_score"] + logger.info( + f"R2 score for optimized prediction in train period: {pred_optim_metric_train}" + ) + + pred_metric_test = r2_score( + df.loc[data_test.index[1:-1], "test"], df.loc[data_test[1:-1].index, "pred"] + ) + logger.info( + f"R2 score for non-optimized prediction in test period: {pred_metric_test}" + ) + pred_naive_metric_test = r2_score( + df.loc[data_test.index[1:-1], "test"], + df.loc[data_test[1:-1].index, "pred_naive"], + ) + logger.info( + f"R2 score for naive persistance forecast in test period: {pred_naive_metric_test}" + ) + pred_optim_metric_test = r2_score( + df.loc[data_test.index[1:-1], "test"], + df.loc[data_test[1:-1].index, "pred_optim"], + ) + logger.info( + f"R2 score for optimized prediction in test period: {pred_optim_metric_test}" + ) + logger.info( + "################################################################################ " + ) + + logger.info("Number of optimal lags obtained: " + str(lags_opt)) logger.info("Prediction in production using last_window") - + # Let's perform a naive load forecast for comparison - retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(emhass_conf, use_secrets=True) - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger) - P_load_forecast = fcst.get_load_forecast(method='naive') - + retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse( + emhass_conf, use_secrets=True + ) + fcst = Forecast( + retrieve_hass_conf, optim_conf, plant_conf, params, emhass_conf, logger + ) + P_load_forecast = fcst.get_load_forecast(method="naive") + # Then retrieve some data and perform a prediction mocking a production env - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger, get_data_from_file=False) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + get_data_from_file=False, + ) days_list = get_days_list(days_needed) - var_model = retrieve_hass_conf['sensor_power_load_no_var_loads'] + var_model = retrieve_hass_conf["sensor_power_load_no_var_loads"] var_list = [var_model] rh.get_data(days_list, var_list) data_last_window = copy.deepcopy(rh.df_final) - + data_last_window = add_date_features(data_last_window) - data_last_window = data_last_window.interpolate(method='linear', axis=0, limit=None) - - predictions_prod = forecaster.predict(steps=lags_opt, - last_window=data_last_window[var_model], - exog=data_last_window.drop(var_model, axis=1)) - - df = pd.DataFrame(index=P_load_forecast.index,columns=['pred_naive','pred_prod']) - df['pred_naive'] = P_load_forecast - df['pred_prod'] = predictions_prod + data_last_window = data_last_window.interpolate(method="linear", axis=0, limit=None) + + predictions_prod = forecaster.predict( + steps=lags_opt, + last_window=data_last_window[var_model], + 
exog=data_last_window.drop(var_model, axis=1), + ) + + df = pd.DataFrame(index=P_load_forecast.index, columns=["pred_naive", "pred_prod"]) + df["pred_naive"] = P_load_forecast + df["pred_prod"] = predictions_prod fig = df.plot() fig.layout.template = template - fig.update_yaxes(title_text = "Power (W)") - fig.update_xaxes(title_text = "Time") + fig.update_yaxes(title_text="Power (W)") + fig.update_xaxes(title_text="Time") fig.show() - fig.write_image(emhass_conf['docs_path'] / "images/load_forecast_production.svg", width=1080, height=0.8*1080) \ No newline at end of file + fig.write_image( + emhass_conf["docs_path"] / "images/load_forecast_production.svg", + width=1080, + height=0.8 * 1080, + ) diff --git a/scripts/optim_results_analysis.py b/scripts/optim_results_analysis.py index 07909805..0375058e 100644 --- a/scripts/optim_results_analysis.py +++ b/scripts/optim_results_analysis.py @@ -1,141 +1,214 @@ # -*- coding: utf-8 -*- -import json +import pathlib import pickle -import numpy as np + import pandas as pd -import pathlib -import plotly.express as px -import plotly.subplots as sp import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_secrets, build_params +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + build_secrets, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['docs_path'] = root / 'docs/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["docs_path"] = root / "docs/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file): - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - df_weather = fcst.get_weather_forecast(method=optim_conf['weather_forecast_method']) +def get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file +): + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + df_weather = fcst.get_weather_forecast(method=optim_conf["weather_forecast_method"]) P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method']) + P_load_forecast = fcst.get_load_forecast(method=optim_conf["load_forecast_method"]) df_input_data_dayahead = 
pd.concat([P_PV_forecast, P_load_forecast], axis=1) - df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - 'profit', emhass_conf, logger) + df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + "profit", + emhass_conf, + logger, + ) return fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt -if __name__ == '__main__': + +if __name__ == "__main__": show_figures = False save_figures = False save_html = False get_data_from_file = True # Build params with default config and default secrets - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = build_secrets(emhass_conf,logger,no_response=True) - params = build_params(emhass_conf,secrets,config,logger) + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + _, secrets = build_secrets(emhass_conf, logger, no_response=True) + params = build_params(emhass_conf, secrets, config, logger) # if get_data_from_file: # retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse({},logger) # else: - retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params,logger) - retrieve_hass_conf, optim_conf, plant_conf = \ - retrieve_hass_conf, optim_conf, plant_conf - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) + retrieve_hass_conf, optim_conf, plant_conf = ( + retrieve_hass_conf, + optim_conf, + plant_conf, + ) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) if get_data_from_file: - with open(pathlib.Path(emhass_conf['data_path'] / 'test_df_final.pkl'), 'rb') as inp: + with open( + pathlib.Path(emhass_conf["data_path"] / "test_df_final.pkl"), "rb" + ) as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] else: - days_list = get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = 
retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + days_list = get_days_list(retrieve_hass_conf["historic_days_to_retrieve"]) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, var_list, minimal_response=False, significant_changes_only=False + ) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() - - fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = \ - get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file) + + fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = ( + get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file + ) + ) df_input_data = fcst.get_load_cost_forecast(df_input_data) df_input_data = fcst.get_prod_price_forecast(df_input_data) - - template = 'presentation' - + + template = "presentation" + # Let's plot the input data - fig_inputs1 = df_input_data[[retrieve_hass_conf['sensor_power_photovoltaics'], - str(retrieve_hass_conf['sensor_power_load_no_var_loads'] + '_positive')]].plot() + fig_inputs1 = df_input_data[ + [ + retrieve_hass_conf["sensor_power_photovoltaics"], + str(retrieve_hass_conf["sensor_power_load_no_var_loads"] + "_positive"), + ] + ].plot() fig_inputs1.layout.template = template - fig_inputs1.update_yaxes(title_text = "Powers (W)") - fig_inputs1.update_xaxes(title_text = "Time") + fig_inputs1.update_yaxes(title_text="Powers (W)") + fig_inputs1.update_xaxes(title_text="Time") if show_figures: fig_inputs1.show() if save_figures: - fig_inputs1.write_image(emhass_conf['docs_path'] / "images/inputs_power.svg", - width=1080, height=0.8*1080) - + fig_inputs1.write_image( + emhass_conf["docs_path"] / "images/inputs_power.svg", + width=1080, + height=0.8 * 1080, + ) + fig_inputs_dah = df_input_data_dayahead.plot() fig_inputs_dah.layout.template = template - fig_inputs_dah.update_yaxes(title_text = "Powers (W)") - fig_inputs_dah.update_xaxes(title_text = "Time") + fig_inputs_dah.update_yaxes(title_text="Powers (W)") + fig_inputs_dah.update_xaxes(title_text="Time") if show_figures: fig_inputs_dah.show() if save_figures: - fig_inputs_dah.write_image(emhass_conf['docs_path'] / "images/inputs_dayahead.svg", - width=1080, height=0.8*1080) - + fig_inputs_dah.write_image( + emhass_conf["docs_path"] / "images/inputs_dayahead.svg", + width=1080, + height=0.8 * 1080, + ) + # And then perform a dayahead optimization df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead) df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead) - optim_conf['treat_deferrable_load_as_semi_cont'] = [True, True] - optim_conf['set_deferrable_load_single_constant'] = [True, True] + optim_conf["treat_deferrable_load_as_semi_cont"] = [True, True] + optim_conf["set_deferrable_load_single_constant"] = [True, True] unit_load_cost = df_input_data[opt.var_load_cost].values unit_prod_price = df_input_data[opt.var_prod_price].values - opt_res_dah = opt.perform_optimization(df_input_data_dayahead, P_PV_forecast.values.ravel(), - P_load_forecast.values.ravel(), 
- unit_load_cost, unit_prod_price, - debug = True) + opt_res_dah = opt.perform_optimization( + df_input_data_dayahead, + P_PV_forecast.values.ravel(), + P_load_forecast.values.ravel(), + unit_load_cost, + unit_prod_price, + debug=True, + ) # opt_res_dah = opt.perform_dayahead_forecast_optim(df_input_data_dayahead, P_PV_forecast, P_load_forecast) - opt_res_dah['P_PV'] = df_input_data_dayahead[['P_PV_forecast']] - fig_res_dah = opt_res_dah[['P_deferrable0', 'P_deferrable1', 'P_grid', 'P_PV', - ]].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' + opt_res_dah["P_PV"] = df_input_data_dayahead[["P_PV_forecast"]] + fig_res_dah = opt_res_dah[ + [ + "P_deferrable0", + "P_deferrable1", + "P_grid", + "P_PV", + ] + ].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") # if show_figures: fig_res_dah.show() if save_figures: - fig_res_dah.write_image(emhass_conf['docs_path'] / "images/optim_results_PV_defLoads_dayaheadOptim.svg", - width=1080, height=0.8*1080) - - print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dah['cost_profit'].sum())) - + fig_res_dah.write_image( + emhass_conf["docs_path"] + / "images/optim_results_PV_defLoads_dayaheadOptim.svg", + width=1080, + height=0.8 * 1080, + ) + + print( + "System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dah["cost_profit"].sum()) + ) + print(opt_res_dah) if save_html: - opt_res_dah.to_html('opt_res_dah.html') \ No newline at end of file + opt_res_dah.to_html("opt_res_dah.html") diff --git a/scripts/read_csv_plot_data.py b/scripts/read_csv_plot_data.py index 2293725b..6f7e5447 100644 --- a/scripts/read_csv_plot_data.py +++ b/scripts/read_csv_plot_data.py @@ -1,80 +1,107 @@ # -*- coding: utf-8 -*- -''' - This is a script for analysis plot. - To use this script you will need plotly and kaleido. Install them using: - pip install plotly - pip install kaleido - Before running this script you should perform a perfect optimization for each type of cost function: - profit, cost and self-consumption -''' +""" +This is a script for analysis plot. +To use this script you will need plotly and kaleido. 
Install them using: + pip install plotly + pip install kaleido +Before running this script you should perform a perfect optimization for each type of cost function: +profit, cost and self-consumption +""" + +import pathlib + import numpy as np import pandas as pd import plotly.express as px -import plotly.subplots as sp import plotly.io as pio -import pathlib -pio.renderers.default = 'browser' -pd.options.plotting.backend = "plotly" +import plotly.subplots as sp -from emhass.utils import get_root, get_logger +pio.renderers.default = "browser" +pd.options.plotting.backend = "plotly" -if __name__ == '__main__': +from emhass.utils import get_logger, get_root +if __name__ == "__main__": # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} - emhass_conf['data_path'] = root / 'data/' - emhass_conf['root_path'] = root / 'src/emhass/' - emhass_conf['docs_path'] = root / 'docs/' - emhass_conf['config_path'] = root / 'config.json' - emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' - emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' + emhass_conf["data_path"] = root / "data/" + emhass_conf["root_path"] = root / "src/emhass/" + emhass_conf["docs_path"] = root / "docs/" + emhass_conf["config_path"] = root / "config.json" + emhass_conf["defaults_path"] = ( + emhass_conf["root_path"] / "data/config_defaults.json" + ) + emhass_conf["associations_path"] = ( + emhass_conf["root_path"] / "data/associations.csv" + ) # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) # Reading CSV files - path_file = emhass_conf['data_path'] / "opt_res_perfect_optim_cost.csv" - data_cost = pd.read_csv(path_file, index_col='timestamp') + path_file = emhass_conf["data_path"] / "opt_res_perfect_optim_cost.csv" + data_cost = pd.read_csv(path_file, index_col="timestamp") data_cost.index = pd.to_datetime(data_cost.index) - path_file = emhass_conf['data_path'] / "opt_res_perfect_optim_profit.csv" - data_profit = pd.read_csv(path_file, index_col='timestamp') + path_file = emhass_conf["data_path"] / "opt_res_perfect_optim_profit.csv" + data_profit = pd.read_csv(path_file, index_col="timestamp") data_profit.index = pd.to_datetime(data_profit.index) - path_file = emhass_conf['data_path'] / "opt_res_perfect_optim_self-consumption.csv" - data_selfcons = pd.read_csv(path_file, index_col='timestamp') + path_file = emhass_conf["data_path"] / "opt_res_perfect_optim_self-consumption.csv" + data_selfcons = pd.read_csv(path_file, index_col="timestamp") data_selfcons.index = pd.to_datetime(data_selfcons.index) # Creating DF to plot - cols_to_plot = ['P_PV', 'P_Load', 'P_def_sum_cost', 'P_def_sum_profit', 'P_def_sum_selfcons', - 'gain_cost', 'gain_profit', 'gain_selfcons'] + cols_to_plot = [ + "P_PV", + "P_Load", + "P_def_sum_cost", + "P_def_sum_profit", + "P_def_sum_selfcons", + "gain_cost", + "gain_profit", + "gain_selfcons", + ] data = pd.DataFrame(index=data_cost.index, columns=cols_to_plot) - data['P_PV'] = data_cost['P_PV'] - data['P_Load'] = data_cost['P_Load'] - data['P_def_sum_cost'] = (data_cost['P_deferrable0']+data_cost['P_deferrable1']).clip(lower=0) - data['P_def_sum_profit'] = (data_profit['P_deferrable0']+data_profit['P_deferrable1']).clip(lower=0) - data['P_def_sum_selfcons'] = (data_selfcons['P_deferrable0']+data_selfcons['P_deferrable1']).clip(lower=0) - data['gain_cost'] = data_cost['cost_profit'] - data['gain_profit'] = data_profit['cost_profit'] - data['gain_selfcons'] = 
data_selfcons['cost_profit'] + data["P_PV"] = data_cost["P_PV"] + data["P_Load"] = data_cost["P_Load"] + data["P_def_sum_cost"] = ( + data_cost["P_deferrable0"] + data_cost["P_deferrable1"] + ).clip(lower=0) + data["P_def_sum_profit"] = ( + data_profit["P_deferrable0"] + data_profit["P_deferrable1"] + ).clip(lower=0) + data["P_def_sum_selfcons"] = ( + data_selfcons["P_deferrable0"] + data_selfcons["P_deferrable1"] + ).clip(lower=0) + data["gain_cost"] = data_cost["cost_profit"] + data["gain_profit"] = data_profit["cost_profit"] + data["gain_selfcons"] = data_selfcons["cost_profit"] # Meta parameters save_figs = True - symbols =['circle', 'square', 'diamond', 'star', 'triangle-left', 'triangle-right'] - template = 'presentation' + symbols = ["circle", "square", "diamond", "star", "triangle-left", "triangle-right"] + template = "presentation" symbol_size = 5 - cf = ['cost', 'profit', 'selfcons'] + cf = ["cost", "profit", "selfcons"] # Plotting using plotly - this_figure = sp.make_subplots(rows=4, cols=1, - shared_xaxes=True, vertical_spacing=0.04, - subplot_titles=['System powers: cost function = cost', - 'System powers: cost function = profit', - 'System powers: cost function = self-consumption', - 'Cost function values'], - x_title="Date") - - fig = px.line(data, x=data.index, y=cols_to_plot[0:3], markers=True, - template = template) + this_figure = sp.make_subplots( + rows=4, + cols=1, + shared_xaxes=True, + vertical_spacing=0.04, + subplot_titles=[ + "System powers: cost function = cost", + "System powers: cost function = profit", + "System powers: cost function = self-consumption", + "Cost function values", + ], + x_title="Date", + ) + + fig = px.line( + data, x=data.index, y=cols_to_plot[0:3], markers=True, template=template + ) fig.update_traces(marker=dict(size=symbol_size)) @@ -85,10 +112,15 @@ fig_traces.append(trace_to_append) for traces in fig_traces: this_figure.append_trace(traces, row=1, col=1) - - fig2 = px.line(data, x=data.index, y=cols_to_plot[0:2]+[cols_to_plot[3]], - markers=True, template = template, - color_discrete_sequence=['#1F77B4', '#FF7F0E', '#D62728']) + + fig2 = px.line( + data, + x=data.index, + y=cols_to_plot[0:2] + [cols_to_plot[3]], + markers=True, + template=template, + color_discrete_sequence=["#1F77B4", "#FF7F0E", "#D62728"], + ) fig2.update_traces(marker=dict(size=symbol_size)) @@ -103,10 +135,15 @@ fig_traces.append(trace_to_append) for traces in fig_traces: this_figure.append_trace(traces, row=2, col=1) - - fig3 = px.line(data, x=data.index, y=cols_to_plot[0:2]+[cols_to_plot[4]], - markers=True, template = template, - color_discrete_sequence=['#1F77B4', '#FF7F0E', '#9467BD']) + + fig3 = px.line( + data, + x=data.index, + y=cols_to_plot[0:2] + [cols_to_plot[4]], + markers=True, + template=template, + color_discrete_sequence=["#1F77B4", "#FF7F0E", "#9467BD"], + ) fig3.update_traces(marker=dict(size=symbol_size)) @@ -122,11 +159,11 @@ for traces in fig_traces: this_figure.append_trace(traces, row=3, col=1) - fig4 = px.line(data, x=data.index, y=cols_to_plot[5:], markers=False, - template = template) + fig4 = px.line( + data, x=data.index, y=cols_to_plot[5:], markers=False, template=template + ) - fig4.update_traces(marker=dict(size=symbol_size), - line=dict(dash='solid')) + fig4.update_traces(marker=dict(size=symbol_size), line=dict(dash="solid")) fig_traces = [] for trace in range(len(fig4["data"])): @@ -140,19 +177,31 @@ this_figure.show() if save_figs: - fig_filename = emhass_conf['docs_path'] / "images/optim_results" - 
this_figure.write_image(str(fig_filename) + ".png", width=1.5*768, height=1.5*1.5*768) - - fig_bar = px.bar(np.arange(len(cf)), x=[c+" (+"+"{:.2f}".format(np.sum(data['gain_'+c])*100/np.sum( - data['gain_profit'])-100)+"%)" for c in cf], - y=[np.sum(data['gain_'+c]) for c in cf], - text=[np.sum(data['gain_'+c]) for c in cf], - template = template) + fig_filename = emhass_conf["docs_path"] / "images/optim_results" + this_figure.write_image( + str(fig_filename) + ".png", width=1.5 * 768, height=1.5 * 1.5 * 768 + ) + + fig_bar = px.bar( + np.arange(len(cf)), + x=[ + c + + " (+" + + "{:.2f}".format( + np.sum(data["gain_" + c]) * 100 / np.sum(data["gain_profit"]) - 100 + ) + + "%)" + for c in cf + ], + y=[np.sum(data["gain_" + c]) for c in cf], + text=[np.sum(data["gain_" + c]) for c in cf], + template=template, + ) fig_bar.update_yaxes(title_text="Cost function total value") - fig_bar.update_traces(texttemplate='%{text:.4s}', textposition='outside') - fig_bar.update_xaxes(title_text = "Cost function") + fig_bar.update_traces(texttemplate="%{text:.4s}", textposition="outside") + fig_bar.update_xaxes(title_text="Cost function") fig_bar.show() if save_figs: - fig_filename = emhass_conf['docs_path'] / "images/optim_results_bar_plot" - fig_bar.write_image(str(fig_filename) + ".png", width=1080, height=0.8*1080) + fig_filename = emhass_conf["docs_path"] / "images/optim_results_bar_plot" + fig_bar.write_image(str(fig_filename) + ".png", width=1080, height=0.8 * 1080) diff --git a/scripts/save_pvlib_module_inverter_database.py b/scripts/save_pvlib_module_inverter_database.py index 9adfe900..ba801478 100644 --- a/scripts/save_pvlib_module_inverter_database.py +++ b/scripts/save_pvlib_module_inverter_database.py @@ -1,63 +1,83 @@ # -*- coding: utf-8 -*- -''' - This is a script for saving the database to be used by PVLib for - modules and inverters models. This was necessary to keep the - database up to date with the latest database version from SAM - while updating the out-dated original database from PVLib. - This script uses the tabulate package: pip install tabulate -''' -import numpy as np -import pandas as pd -import pathlib +""" +This is a script for saving the database to be used by PVLib for +modules and inverters models. This was necessary to keep the +database up to date with the latest database version from SAM +while updating the out-dated original database from PVLib. 
+This script uses the tabulate package: pip install tabulate +""" + import bz2 +import pathlib import pickle as cPickle + +import pandas as pd import pvlib from tabulate import tabulate -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization -from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger +from emhass.utils import get_logger, get_root # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['docs_path'] = root / 'docs/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["docs_path"] = root / "docs/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -if __name__ == '__main__': - +if __name__ == "__main__": save_new_files = True - logger.info('Reading original outdated database from PVLib') - cec_modules_0 = pvlib.pvsystem.retrieve_sam('CECMod') - cec_inverters_0 = pvlib.pvsystem.retrieve_sam('cecinverter') - logger.info('Reading the downloaded database from SAM') - cec_modules = pvlib.pvsystem.retrieve_sam(path=str(emhass_conf['data_path'] / 'CEC Modules.csv')) - cec_modules = cec_modules.loc[:, ~cec_modules.columns.duplicated()] # Drop column duplicates - cec_inverters = pvlib.pvsystem.retrieve_sam(path=str(emhass_conf['data_path'] / 'CEC Inverters.csv')) - cec_inverters = cec_inverters.loc[:, ~cec_inverters.columns.duplicated()] # Drop column duplicates - logger.info('Updating and saving databases') - cols_to_keep_modules = [elem for elem in list(cec_modules_0.columns) if elem not in list(cec_modules.columns)] + logger.info("Reading original outdated database from PVLib") + cec_modules_0 = pvlib.pvsystem.retrieve_sam("CECMod") + cec_inverters_0 = pvlib.pvsystem.retrieve_sam("cecinverter") + logger.info("Reading the downloaded database from SAM") + cec_modules = pvlib.pvsystem.retrieve_sam( + path=str(emhass_conf["data_path"] / "CEC Modules.csv") + ) + cec_modules = cec_modules.loc[ + :, ~cec_modules.columns.duplicated() + ] # Drop column duplicates + cec_inverters = pvlib.pvsystem.retrieve_sam( + path=str(emhass_conf["data_path"] / "CEC Inverters.csv") + ) + cec_inverters = cec_inverters.loc[ + :, ~cec_inverters.columns.duplicated() + ] # Drop column duplicates + logger.info("Updating and saving databases") + cols_to_keep_modules = [ + elem + for elem in list(cec_modules_0.columns) + if elem not in list(cec_modules.columns) + ] cec_modules = pd.concat([cec_modules, cec_modules_0[cols_to_keep_modules]], axis=1) - cols_to_keep_inverters = [elem for elem in list(cec_inverters_0.columns) if elem not in list(cec_inverters.columns)] - cec_inverters = pd.concat([cec_inverters, cec_inverters_0[cols_to_keep_inverters]], axis=1) - logger.info(f'Number of elements from old database copied in new database for modules = {len(cols_to_keep_modules)}') - logger.info(f'Number of elements from old database copied in new database for 
inverters = {len(cols_to_keep_inverters)}') - logger.info('Modules databases') - print(tabulate(cec_modules.head(20).iloc[:,:5], headers='keys', tablefmt='psql')) - logger.info('Inverters databases') - print(tabulate(cec_inverters.head(20).iloc[:,:3], headers='keys', tablefmt='psql')) + cols_to_keep_inverters = [ + elem + for elem in list(cec_inverters_0.columns) + if elem not in list(cec_inverters.columns) + ] + cec_inverters = pd.concat( + [cec_inverters, cec_inverters_0[cols_to_keep_inverters]], axis=1 + ) + logger.info( + f"Number of elements from old database copied in new database for modules = {len(cols_to_keep_modules)}" + ) + logger.info( + f"Number of elements from old database copied in new database for inverters = {len(cols_to_keep_inverters)}" + ) + logger.info("Modules databases") + print(tabulate(cec_modules.head(20).iloc[:, :5], headers="keys", tablefmt="psql")) + logger.info("Inverters databases") + print(tabulate(cec_inverters.head(20).iloc[:, :3], headers="keys", tablefmt="psql")) if save_new_files: - with bz2.BZ2File(emhass_conf['root_path'] + '/data/cec_modules.pbz2', "w") as f: - cPickle.dump(cec_modules, f) + with bz2.BZ2File(emhass_conf["root_path"] + "/data/cec_modules.pbz2", "w") as f: + cPickle.dump(cec_modules, f) if save_new_files: - with bz2.BZ2File(emhass_conf['root_path'] + '/data/cec_inverters.pbz2', "w") as f: + with bz2.BZ2File( + emhass_conf["root_path"] + "/data/cec_inverters.pbz2", "w" + ) as f: cPickle.dump(cec_inverters, f) - \ No newline at end of file diff --git a/scripts/script_debug_forecasts.py b/scripts/script_debug_forecasts.py index 0525b759..507dd685 100644 --- a/scripts/script_debug_forecasts.py +++ b/scripts/script_debug_forecasts.py @@ -1,61 +1,75 @@ # -*- coding: utf-8 -*- -import json -import pickle -import numpy as np -import pandas as pd import pathlib -import plotly.express as px -import plotly.subplots as sp + +import pandas as pd import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_secrets, build_params +from emhass.utils import ( + build_config, + build_params, + build_secrets, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['secrets_path'] = root / 'secrets_emhass.yaml' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["secrets_path"] = root / "secrets_emhass.yaml" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -if __name__ == '__main__': - +if __name__ == "__main__": get_data_from_file = True - template = 'presentation' - - methods_list = ['solar.forecast', 'solcast', 'scrapper'] # - + template = "presentation" + + methods_list = ["solar.forecast", "solcast", "scrapper"] # + for k, method 
in enumerate(methods_list): # Build params with default config, weather_forecast_method=method and default secrets - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - config['weather_forecast_method'] = method - _,secrets = build_secrets(emhass_conf,logger,secrets_path=emhass_conf['secrets_path'],no_response=True) - params = build_params(emhass_conf,secrets,config,logger) - - retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params,logger) - optim_conf['delta_forecast_daily'] = pd.Timedelta(days=2) - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + config["weather_forecast_method"] = method + _, secrets = build_secrets( + emhass_conf, + logger, + secrets_path=emhass_conf["secrets_path"], + no_response=True, + ) + params = build_params(emhass_conf, secrets, config, logger) + + retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) + optim_conf["delta_forecast_daily"] = pd.Timedelta(days=2) + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) df_weather = fcst.get_weather_forecast(method=method) P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_PV_forecast = P_PV_forecast.to_frame(name=f'PV_forecast {method}') + P_PV_forecast = P_PV_forecast.to_frame(name=f"PV_forecast {method}") if k == 0: res_df = P_PV_forecast else: res_df = pd.concat([res_df, P_PV_forecast], axis=1) - + # Plot the PV data fig = res_df.plot() fig.layout.template = template - fig.update_yaxes(title_text = "Powers (W)") - fig.update_xaxes(title_text = "Time") + fig.update_yaxes(title_text="Powers (W)") + fig.update_xaxes(title_text="Time") fig.show() - diff --git a/scripts/script_debug_optim.py b/scripts/script_debug_optim.py index 2ea31cd5..f29742d3 100644 --- a/scripts/script_debug_optim.py +++ b/scripts/script_debug_optim.py @@ -1,134 +1,184 @@ # -*- coding: utf-8 -*- -import json +import pathlib import pickle -import numpy as np + import pandas as pd -import pathlib -import plotly.express as px -import plotly.subplots as sp import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_params +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / 
"data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -if __name__ == '__main__': +if __name__ == "__main__": get_data_from_file = True show_figures = True - template = 'presentation' - - # Build params with default config (no secrets) - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - params = build_params(emhass_conf,{},config,logger) + template = "presentation" + + # Build params with default config (no secrets) + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + params = build_params(emhass_conf, {}, config, logger) retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) - retrieve_hass_conf, optim_conf, plant_conf = \ - retrieve_hass_conf, optim_conf, plant_conf - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + retrieve_hass_conf, optim_conf, plant_conf = ( + retrieve_hass_conf, + optim_conf, + plant_conf, + ) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) if get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] else: - days_list = get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + days_list = get_days_list(retrieve_hass_conf["historic_days_to_retrieve"]) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, var_list, minimal_response=False, significant_changes_only=False + ) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) 
df_input_data = rh.df_final.copy() - - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - df_weather = fcst.get_weather_forecast(method='csv') + + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + df_weather = fcst.get_weather_forecast(method="csv") P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method']) + P_load_forecast = fcst.get_load_forecast(method=optim_conf["load_forecast_method"]) df_input_data = pd.concat([P_PV_forecast, P_load_forecast], axis=1) - df_input_data.columns = ['P_PV_forecast', 'P_load_forecast'] - + df_input_data.columns = ["P_PV_forecast", "P_load_forecast"] + df_input_data = fcst.get_load_cost_forecast(df_input_data) df_input_data = fcst.get_prod_price_forecast(df_input_data) - input_data_dict = {'retrieve_hass_conf': retrieve_hass_conf} - + input_data_dict = {"retrieve_hass_conf": retrieve_hass_conf} + # Set special debug cases - + # Solver configurations - optim_conf.update({'lp_solver': 'PULP_CBC_CMD'}) # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. - optim_conf.update({'lp_solver_path': 'empty'}) # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc - + optim_conf.update( + {"lp_solver": "PULP_CBC_CMD"} + ) # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. + optim_conf.update( + {"lp_solver_path": "empty"} + ) # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc + # Semi continuous and constant values - optim_conf.update({'treat_deferrable_load_as_semi_cont': [True, False]}) - optim_conf.update({'set_deferrable_load_single_constant': [True, False]}) - + optim_conf.update({"treat_deferrable_load_as_semi_cont": [True, False]}) + optim_conf.update({"set_deferrable_load_single_constant": [True, False]}) + # A sequence of values # optim_conf.update({'nominal_power_of_deferrable_loads': [[500.0, 100.0, 100.0, 500.0], 750.0]}) - + # Using a battery - optim_conf.update({'set_use_battery': False}) - optim_conf.update({'set_nocharge_from_grid': False}) - optim_conf.update({'set_battery_dynamic': True}) - optim_conf.update({'set_nodischarge_to_grid': True}) - + optim_conf.update({"set_use_battery": False}) + optim_conf.update({"set_nocharge_from_grid": False}) + optim_conf.update({"set_battery_dynamic": True}) + optim_conf.update({"set_nodischarge_to_grid": True}) + # A hybrid inverter case - plant_conf.update({'inverter_is_hybrid': False}) - + plant_conf.update({"inverter_is_hybrid": False}) + # Setting some negative values on production prices - df_input_data.loc[df_input_data.index[25:30],'unit_prod_price'] = -0.07 - df_input_data['P_PV_forecast'] = df_input_data['P_PV_forecast']*2 - P_PV_forecast = P_PV_forecast*2 - - costfun = 'profit' - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - costfun, emhass_conf, logger) + df_input_data.loc[df_input_data.index[25:30], "unit_prod_price"] = -0.07 + df_input_data["P_PV_forecast"] = df_input_data["P_PV_forecast"] * 2 + P_PV_forecast = P_PV_forecast * 2 + + costfun = "profit" + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + costfun, + emhass_conf, + logger, + ) 
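Once perform_dayahead_forecast_optim below has returned, a quick sanity check on the result is possible; this sketch only assumes the same optim_status and cost_profit columns that this script already prints at the end:

    # The special debug cases set above (negative unit_prod_price, doubled PV)
    # should still produce a solved day-ahead plan.
    status = opt_res_dayahead["optim_status"].unique().item()
    profit = opt_res_dayahead["cost_profit"].sum()
    print(f"Status: {status}, total profit cost function: {profit:.2f}")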
opt_res_dayahead = opt.perform_dayahead_forecast_optim( - df_input_data, P_PV_forecast, P_load_forecast) - + df_input_data, P_PV_forecast, P_load_forecast + ) + # Let's plot the input data fig_inputs_dah = df_input_data.plot() fig_inputs_dah.layout.template = template - fig_inputs_dah.update_yaxes(title_text = "Powers (W) and Costs(EUR)") - fig_inputs_dah.update_xaxes(title_text = "Time") + fig_inputs_dah.update_yaxes(title_text="Powers (W) and Costs(EUR)") + fig_inputs_dah.update_xaxes(title_text="Time") if show_figures: fig_inputs_dah.show() - - vars_to_plot = ['P_deferrable0', 'P_deferrable1','P_grid', 'P_PV'] - if plant_conf['inverter_is_hybrid']: - vars_to_plot = vars_to_plot + ['P_hybrid_inverter'] - if plant_conf['compute_curtailment']: - vars_to_plot = vars_to_plot + ['P_PV_curtailment'] - if optim_conf['set_use_battery']: - vars_to_plot = vars_to_plot + ['P_batt'] + ['SOC_opt'] - fig_res_dah = opt_res_dayahead[vars_to_plot].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' + + vars_to_plot = ["P_deferrable0", "P_deferrable1", "P_grid", "P_PV"] + if plant_conf["inverter_is_hybrid"]: + vars_to_plot = vars_to_plot + ["P_hybrid_inverter"] + if plant_conf["compute_curtailment"]: + vars_to_plot = vars_to_plot + ["P_PV_curtailment"] + if optim_conf["set_use_battery"]: + vars_to_plot = vars_to_plot + ["P_batt"] + ["SOC_opt"] + fig_res_dah = opt_res_dayahead[ + vars_to_plot + ].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") if show_figures: fig_res_dah.show() - - print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dayahead['cost_profit'].sum())+", Status: "+opt_res_dayahead['optim_status'].unique().item()) - + + print( + "System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dayahead["cost_profit"].sum()) + + ", Status: " + + opt_res_dayahead["optim_status"].unique().item() + ) + print(opt_res_dayahead[vars_to_plot]) - \ No newline at end of file diff --git a/scripts/script_simple_thermal_model.py b/scripts/script_simple_thermal_model.py index 6b95d434..54755448 100644 --- a/scripts/script_simple_thermal_model.py +++ b/scripts/script_simple_thermal_model.py @@ -1,151 +1,215 @@ # -*- coding: utf-8 -*- -import json +import pathlib import pickle import random -import numpy as np + import pandas as pd -import pathlib -import plotly.express as px -import plotly.subplots as sp import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_params +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 
'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -if __name__ == '__main__': +if __name__ == "__main__": get_data_from_file = True show_figures = True - template = 'presentation' - - # Build params with default config (no secrets) - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - params = build_params(emhass_conf,{},config,logger) + template = "presentation" + + # Build params with default config (no secrets) + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + params = build_params(emhass_conf, {}, config, logger) retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) - retrieve_hass_conf, optim_conf, plant_conf = \ - retrieve_hass_conf, optim_conf, plant_conf - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + retrieve_hass_conf, optim_conf, plant_conf = ( + retrieve_hass_conf, + optim_conf, + plant_conf, + ) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) if get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] else: - days_list = get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + days_list = get_days_list(retrieve_hass_conf["historic_days_to_retrieve"]) + var_list = [ + 
retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, var_list, minimal_response=False, significant_changes_only=False + ) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() - - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - df_weather = fcst.get_weather_forecast(method='csv') + + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + df_weather = fcst.get_weather_forecast(method="csv") P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method']) + P_load_forecast = fcst.get_load_forecast(method=optim_conf["load_forecast_method"]) df_input_data = pd.concat([P_PV_forecast, P_load_forecast], axis=1) - df_input_data.columns = ['P_PV_forecast', 'P_load_forecast'] - + df_input_data.columns = ["P_PV_forecast", "P_load_forecast"] + df_input_data = fcst.get_load_cost_forecast(df_input_data) df_input_data = fcst.get_prod_price_forecast(df_input_data) - input_data_dict = {'retrieve_hass_conf': retrieve_hass_conf} - + input_data_dict = {"retrieve_hass_conf": retrieve_hass_conf} + # Set special debug cases - + # Solver configurations - optim_conf.update({'lp_solver': 'PULP_CBC_CMD'}) # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. - optim_conf.update({'lp_solver_path': 'empty'}) # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc - + optim_conf.update( + {"lp_solver": "PULP_CBC_CMD"} + ) # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. 
+ optim_conf.update( + {"lp_solver_path": "empty"} + ) # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc + # Config for a single thermal model - optim_conf.update({'number_of_deferrable_loads': 1}) - optim_conf.update({'nominal_power_of_deferrable_loads': [1000.0]}) - optim_conf.update({'operating_hours_of_each_deferrable_load': [0]}) - optim_conf.update({'start_timesteps_of_each_deferrable_load': [0]}) - optim_conf.update({'end_timesteps_of_each_deferrable_load': [0]}) - optim_conf.update({'treat_deferrable_load_as_semi_cont': [False]}) - optim_conf.update({'set_deferrable_load_single_constant': [False]}) - optim_conf.update({'set_deferrable_startup_penalty': [0.0]}) - + optim_conf.update({"number_of_deferrable_loads": 1}) + optim_conf.update({"nominal_power_of_deferrable_loads": [1000.0]}) + optim_conf.update({"operating_hours_of_each_deferrable_load": [0]}) + optim_conf.update({"start_timesteps_of_each_deferrable_load": [0]}) + optim_conf.update({"end_timesteps_of_each_deferrable_load": [0]}) + optim_conf.update({"treat_deferrable_load_as_semi_cont": [False]}) + optim_conf.update({"set_deferrable_load_single_constant": [False]}) + optim_conf.update({"set_deferrable_startup_penalty": [0.0]}) + # Thermal modeling - df_input_data['outdoor_temperature_forecast'] = [random.normalvariate(10.0, 3.0) for _ in range(48)] - + df_input_data["outdoor_temperature_forecast"] = [ + random.normalvariate(10.0, 3.0) for _ in range(48) + ] + runtimeparams = { - 'def_load_config': [ - {'thermal_config': { - 'heating_rate': 5.0, - 'cooling_constant': 0.1, - 'overshoot_temperature': 24.0, - 'start_temperature': 20, - 'desired_temperatures': [21]*48, + "def_load_config": [ + { + "thermal_config": { + "heating_rate": 5.0, + "cooling_constant": 0.1, + "overshoot_temperature": 24.0, + "start_temperature": 20, + "desired_temperatures": [21] * 48, } } ] } - if 'def_load_config' in runtimeparams: - optim_conf["def_load_config"] = runtimeparams['def_load_config'] + if "def_load_config" in runtimeparams: + optim_conf["def_load_config"] = runtimeparams["def_load_config"] - costfun = 'profit' - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - costfun, emhass_conf, logger) + costfun = "profit" + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + costfun, + emhass_conf, + logger, + ) P_PV_forecast.loc[:] = 0 P_load_forecast.loc[:] = 0 - - df_input_data.loc[df_input_data.index[25:30],'unit_load_cost'] = 2.0 # A price peak - unit_load_cost = df_input_data[opt.var_load_cost].values # €/kWh - unit_prod_price = df_input_data[opt.var_prod_price].values # €/kWh - - - opt_res_dayahead = opt.perform_optimization(df_input_data, P_PV_forecast.values.ravel(), - P_load_forecast.values.ravel(), - unit_load_cost, unit_prod_price, debug=True) - + + df_input_data.loc[df_input_data.index[25:30], "unit_load_cost"] = ( + 2.0 # A price peak + ) + unit_load_cost = df_input_data[opt.var_load_cost].values # €/kWh + unit_prod_price = df_input_data[opt.var_prod_price].values # €/kWh + + opt_res_dayahead = opt.perform_optimization( + df_input_data, + P_PV_forecast.values.ravel(), + P_load_forecast.values.ravel(), + unit_load_cost, + unit_prod_price, + debug=True, + ) + # Let's plot the input data fig_inputs_dah = df_input_data.plot() fig_inputs_dah.layout.template = template - fig_inputs_dah.update_yaxes(title_text = "Powers (W) and Costs(EUR)") - fig_inputs_dah.update_xaxes(title_text = "Time") + 
fig_inputs_dah.update_yaxes(title_text="Powers (W) and Costs(EUR)") + fig_inputs_dah.update_xaxes(title_text="Time") if show_figures: fig_inputs_dah.show() - - vars_to_plot = ['P_deferrable0', 'unit_load_cost', 'predicted_temp_heater0', 'target_temp_heater0', 'P_def_start_0'] - if plant_conf['inverter_is_hybrid']: - vars_to_plot = vars_to_plot + ['P_hybrid_inverter'] - if plant_conf['compute_curtailment']: - vars_to_plot = vars_to_plot + ['P_PV_curtailment'] - if optim_conf['set_use_battery']: - vars_to_plot = vars_to_plot + ['P_batt'] + ['SOC_opt'] - fig_res_dah = opt_res_dayahead[vars_to_plot].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' + + vars_to_plot = [ + "P_deferrable0", + "unit_load_cost", + "predicted_temp_heater0", + "target_temp_heater0", + "P_def_start_0", + ] + if plant_conf["inverter_is_hybrid"]: + vars_to_plot = vars_to_plot + ["P_hybrid_inverter"] + if plant_conf["compute_curtailment"]: + vars_to_plot = vars_to_plot + ["P_PV_curtailment"] + if optim_conf["set_use_battery"]: + vars_to_plot = vars_to_plot + ["P_batt"] + ["SOC_opt"] + fig_res_dah = opt_res_dayahead[ + vars_to_plot + ].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") if show_figures: fig_res_dah.show() - - print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dayahead['cost_profit'].sum())+", Status: "+opt_res_dayahead['optim_status'].unique().item()) - + + print( + "System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dayahead["cost_profit"].sum()) + + ", Status: " + + opt_res_dayahead["optim_status"].unique().item() + ) + print(opt_res_dayahead[vars_to_plot]) - \ No newline at end of file diff --git a/scripts/script_thermal_model_optim.py b/scripts/script_thermal_model_optim.py index 49073737..0a05ab45 100644 --- a/scripts/script_thermal_model_optim.py +++ b/scripts/script_thermal_model_optim.py @@ -1,143 +1,209 @@ # -*- coding: utf-8 -*- -import json +import pathlib import pickle import random -import numpy as np + import pandas as pd -import pathlib -import plotly.express as px -import plotly.subplots as sp import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_params +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" 
+emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -if __name__ == '__main__': +if __name__ == "__main__": get_data_from_file = True params = None show_figures = True - template = 'presentation' - - # Build params with default config (no secrets) - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - params = build_params(emhass_conf,{},config,logger) + template = "presentation" + + # Build params with default config (no secrets) + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + params = build_params(emhass_conf, {}, config, logger) retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) - retrieve_hass_conf, optim_conf, plant_conf = \ - retrieve_hass_conf, optim_conf, plant_conf - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + retrieve_hass_conf, optim_conf, plant_conf = ( + retrieve_hass_conf, + optim_conf, + plant_conf, + ) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) if get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] else: - days_list = get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + days_list = get_days_list(retrieve_hass_conf["historic_days_to_retrieve"]) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, var_list, minimal_response=False, significant_changes_only=False + ) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + 
load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() - - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - df_weather = fcst.get_weather_forecast(method='csv') + + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + df_weather = fcst.get_weather_forecast(method="csv") P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method']) + P_load_forecast = fcst.get_load_forecast(method=optim_conf["load_forecast_method"]) df_input_data = pd.concat([P_PV_forecast, P_load_forecast], axis=1) - df_input_data.columns = ['P_PV_forecast', 'P_load_forecast'] - + df_input_data.columns = ["P_PV_forecast", "P_load_forecast"] + df_input_data = fcst.get_load_cost_forecast(df_input_data) df_input_data = fcst.get_prod_price_forecast(df_input_data) - input_data_dict = {'retrieve_hass_conf': retrieve_hass_conf} - + input_data_dict = {"retrieve_hass_conf": retrieve_hass_conf} + # Set special debug cases - + # Solver configurations - optim_conf.update({'lp_solver': 'PULP_CBC_CMD'}) # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. - optim_conf.update({'lp_solver_path': 'empty'}) # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc - + optim_conf.update( + {"lp_solver": "PULP_CBC_CMD"} + ) # set the name of the linear programming solver that will be used. Options are 'PULP_CBC_CMD', 'GLPK_CMD' and 'COIN_CMD'. 
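The def_load_config block a few lines below attaches a thermal_config (heating_rate, cooling_constant, start and desired temperatures) to the second deferrable load. The actual heater model is implemented inside emhass.optimization; purely as an illustration of what these parameter names suggest, a first-order temperature update could look like:

    def next_temperature(temp_in, temp_out, p_def, p_nom,
                         heating_rate, cooling_constant, dt_hours):
        # Temperature gained by running the heater at p_def, scaled by nominal power.
        heating = heating_rate * dt_hours * p_def / p_nom
        # Losses proportional to the indoor/outdoor temperature difference.
        cooling = cooling_constant * (temp_in - temp_out) * dt_hours
        return temp_in + heating - cooling

    # Illustrative values taken from the script: 1 kW heater at full power for half an hour.
    print(next_temperature(20.0, 10.0, 1000.0, 1000.0, 5.0, 0.1, 0.5))  # -> 22.0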
+ optim_conf.update( + {"lp_solver_path": "empty"} + ) # set the path to the LP solver, COIN_CMD default is /usr/bin/cbc + # Semi continuous and constant values - optim_conf.update({'treat_deferrable_load_as_semi_cont': [True, False]}) - optim_conf.update({'set_deferrable_load_single_constant': [True, False]}) - + optim_conf.update({"treat_deferrable_load_as_semi_cont": [True, False]}) + optim_conf.update({"set_deferrable_load_single_constant": [True, False]}) + # Thermal modeling - df_input_data['outdoor_temperature_forecast'] = [random.normalvariate(10.0, 3.0) for _ in range(48)] - + df_input_data["outdoor_temperature_forecast"] = [ + random.normalvariate(10.0, 3.0) for _ in range(48) + ] + runtimeparams = { - 'def_load_config': [ + "def_load_config": [ {}, - {'thermal_config': { - 'heating_rate': 5.0, - 'cooling_constant': 0.1, - 'overshoot_temperature': 24.0, - 'start_temperature': 20, - 'desired_temperatures': [21]*48, + { + "thermal_config": { + "heating_rate": 5.0, + "cooling_constant": 0.1, + "overshoot_temperature": 24.0, + "start_temperature": 20, + "desired_temperatures": [21] * 48, } - } + }, ] } - if 'def_load_config' in runtimeparams: - optim_conf["def_load_config"] = runtimeparams['def_load_config'] + if "def_load_config" in runtimeparams: + optim_conf["def_load_config"] = runtimeparams["def_load_config"] - costfun = 'profit' - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - costfun, emhass_conf, logger) + costfun = "profit" + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + costfun, + emhass_conf, + logger, + ) # opt_res_dayahead = opt.perform_dayahead_forecast_optim( # df_input_data, P_PV_forecast, P_load_forecast) - unit_load_cost = df_input_data[opt.var_load_cost].values # €/kWh - unit_prod_price = df_input_data[opt.var_prod_price].values # €/kWh - opt_res_dayahead = opt.perform_optimization(df_input_data, P_PV_forecast.values.ravel(), - P_load_forecast.values.ravel(), - unit_load_cost, unit_prod_price, debug=True) - + unit_load_cost = df_input_data[opt.var_load_cost].values # €/kWh + unit_prod_price = df_input_data[opt.var_prod_price].values # €/kWh + opt_res_dayahead = opt.perform_optimization( + df_input_data, + P_PV_forecast.values.ravel(), + P_load_forecast.values.ravel(), + unit_load_cost, + unit_prod_price, + debug=True, + ) + # Let's plot the input data fig_inputs_dah = df_input_data.plot() fig_inputs_dah.layout.template = template - fig_inputs_dah.update_yaxes(title_text = "Powers (W) and Costs(EUR)") - fig_inputs_dah.update_xaxes(title_text = "Time") + fig_inputs_dah.update_yaxes(title_text="Powers (W) and Costs(EUR)") + fig_inputs_dah.update_xaxes(title_text="Time") if show_figures: fig_inputs_dah.show() - - vars_to_plot = ['P_deferrable0', 'P_deferrable1','P_grid', 'P_PV', 'predicted_temp_heater1', 'target_temp_heater1', 'P_def_start_1', 'P_def_bin2_1'] - if plant_conf['inverter_is_hybrid']: - vars_to_plot = vars_to_plot + ['P_hybrid_inverter'] - if plant_conf['compute_curtailment']: - vars_to_plot = vars_to_plot + ['P_PV_curtailment'] - if optim_conf['set_use_battery']: - vars_to_plot = vars_to_plot + ['P_batt'] + ['SOC_opt'] - fig_res_dah = opt_res_dayahead[vars_to_plot].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' + + vars_to_plot = [ + "P_deferrable0", + "P_deferrable1", + "P_grid", + "P_PV", + "predicted_temp_heater1", + "target_temp_heater1", + "P_def_start_1", + "P_def_bin2_1", + ] + if 
plant_conf["inverter_is_hybrid"]: + vars_to_plot = vars_to_plot + ["P_hybrid_inverter"] + if plant_conf["compute_curtailment"]: + vars_to_plot = vars_to_plot + ["P_PV_curtailment"] + if optim_conf["set_use_battery"]: + vars_to_plot = vars_to_plot + ["P_batt"] + ["SOC_opt"] + fig_res_dah = opt_res_dayahead[ + vars_to_plot + ].plot() # 'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1' fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") if show_figures: fig_res_dah.show() - - print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dayahead['cost_profit'].sum())+", Status: "+opt_res_dayahead['optim_status'].unique().item()) - + + print( + "System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dayahead["cost_profit"].sum()) + + ", Status: " + + opt_res_dayahead["optim_status"].unique().item() + ) + print(opt_res_dayahead[vars_to_plot]) - \ No newline at end of file diff --git a/scripts/special_config_analysis.py b/scripts/special_config_analysis.py index ce07fb61..703bd764 100644 --- a/scripts/special_config_analysis.py +++ b/scripts/special_config_analysis.py @@ -1,148 +1,322 @@ # -*- coding: utf-8 -*- -''' - This is a script for analysis plot. - To use this script you will need plotly and kaleido. Install them using: - pip install plotly - pip install kaleido - Before running this script you should perform a perfect optimization for each type of cost function: - profit, cost and self-consumption -''' -import numpy as np -import pandas as pd +""" +This is a script for analysis plot. +To use this script you will need plotly and kaleido. 
Install them using: + pip install plotly + pip install kaleido +Before running this script you should perform a perfect optimization for each type of cost function: +profit, cost and self-consumption +""" + import pathlib -import yaml -import json import pickle -import plotly.express as px -import plotly.subplots as sp + +import pandas as pd import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_secrets, build_params +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + build_secrets, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['scripts_path'] = root / 'scripts/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['secrets_path'] = root / 'secrets_emhass.yaml' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["scripts_path"] = root / "scripts/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["secrets_path"] = root / "secrets_emhass.yaml" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file): - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - df_weather = fcst.get_weather_forecast(method='solar.forecast') + +def get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file +): + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + df_weather = fcst.get_weather_forecast(method="solar.forecast") P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method']) + P_load_forecast = fcst.get_load_forecast(method=optim_conf["load_forecast_method"]) df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1) - df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - 'cost', emhass_conf, logger) + df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + "cost", + emhass_conf, + logger, + ) return fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt -if __name__ == '__main__': + +if __name__ == "__main__": get_data_from_file = False # Build params with 
defaults, secret file, and added special config and secrets - config = build_config(emhass_conf,logger,emhass_conf['defaults_path'],emhass_conf['scripts_path'] / 'special_options.json') - emhass_conf,secrets = build_secrets(emhass_conf,logger,options_path=emhass_conf['scripts_path'] / 'special_options.json',secrets_path=emhass_conf['secrets_path'],no_response=True) + config = build_config( + emhass_conf, + logger, + emhass_conf["defaults_path"], + emhass_conf["scripts_path"] / "special_options.json", + ) + emhass_conf, secrets = build_secrets( + emhass_conf, + logger, + options_path=emhass_conf["scripts_path"] / "special_options.json", + secrets_path=emhass_conf["secrets_path"], + no_response=True, + ) params = build_params(emhass_conf, secrets, config, logger) - - pv_power_forecast = [0, 8, 27, 42, 47, 41, 25, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 52, 73, 74, 68, 44, 12, 0, 0, 0, 0] - load_power_forecast = [2850, 3021, 3107, 3582, 2551, 2554, 1856, 2505, 1768, 2540, 1722, 2463, 1670, 1379, 1165, 1000, 1641, 1181, 1861, 1414, 1467, 1344, 1209, 1531] - load_cost_forecast = [17.836, 19.146, 18.753, 17.838, 17.277, 16.282, 16.736, 16.047, 17.004, 19.982, 17.17, 16.968, 16.556, 16.21, 12.333, 10.937] - prod_price_forecast = [6.651, 7.743, 7.415, 6.653, 6.185, 5.356, 5.734, 5.16, 5.958, 8.439, 6.096, 5.928, 5.584, 5.296, 4.495, 3.332] + + pv_power_forecast = [ + 0, + 8, + 27, + 42, + 47, + 41, + 25, + 7, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 19, + 52, + 73, + 74, + 68, + 44, + 12, + 0, + 0, + 0, + 0, + ] + load_power_forecast = [ + 2850, + 3021, + 3107, + 3582, + 2551, + 2554, + 1856, + 2505, + 1768, + 2540, + 1722, + 2463, + 1670, + 1379, + 1165, + 1000, + 1641, + 1181, + 1861, + 1414, + 1467, + 1344, + 1209, + 1531, + ] + load_cost_forecast = [ + 17.836, + 19.146, + 18.753, + 17.838, + 17.277, + 16.282, + 16.736, + 16.047, + 17.004, + 19.982, + 17.17, + 16.968, + 16.556, + 16.21, + 12.333, + 10.937, + ] + prod_price_forecast = [ + 6.651, + 7.743, + 7.415, + 6.653, + 6.185, + 5.356, + 5.734, + 5.16, + 5.958, + 8.439, + 6.096, + 5.928, + 5.584, + 5.296, + 4.495, + 3.332, + ] prediction_horizon = 16 soc_init = 0.98 soc_final = 0.3 operating_hours_of_each_deferrable_load = [0] alpha = 1 beta = 0 - - params['passed_data'] = {'pv_power_forecast':pv_power_forecast,'load_power_forecast':load_power_forecast, - 'load_cost_forecast':load_cost_forecast,'prod_price_forecast':prod_price_forecast, - 'prediction_horizon':prediction_horizon,'soc_init':soc_init,'soc_final':soc_final, - 'operating_hours_of_each_deferrable_load':operating_hours_of_each_deferrable_load,'alpha':alpha,'beta':beta} - - params['optim_conf']['weather_forecast_method'] = 'list' - params['optim_conf']['load_forecast_method'] = 'list' - params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['production_price_forecast_method'] = 'list' - - data_path = emhass_conf['scripts_path'] / 'data_temp.pkl' + + params["passed_data"] = { + "pv_power_forecast": pv_power_forecast, + "load_power_forecast": load_power_forecast, + "load_cost_forecast": load_cost_forecast, + "prod_price_forecast": prod_price_forecast, + "prediction_horizon": prediction_horizon, + "soc_init": soc_init, + "soc_final": soc_final, + "operating_hours_of_each_deferrable_load": operating_hours_of_each_deferrable_load, + "alpha": alpha, + "beta": beta, + } + + params["optim_conf"]["weather_forecast_method"] = "list" + params["optim_conf"]["load_forecast_method"] = "list" + 
params["optim_conf"]["load_cost_forecast_method"] = "list" + params["optim_conf"]["production_price_forecast_method"] = "list" + + data_path = emhass_conf["scripts_path"] / "data_temp.pkl" retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) - + if data_path.is_file(): logger.info("Loading a previous data file") with open(data_path, "rb") as fid: - fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt, df_input_data = pickle.load(fid) + ( + fcst, + P_PV_forecast, + P_load_forecast, + df_input_data_dayahead, + opt, + df_input_data, + ) = pickle.load(fid) else: - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) - days_list = get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) + days_list = get_days_list(retrieve_hass_conf["historic_days_to_retrieve"]) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, var_list, minimal_response=False, significant_changes_only=False + ) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() - fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = \ - get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file) + fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = ( + get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file + ) + ) df_input_data = fcst.get_load_cost_forecast(df_input_data) df_input_data = fcst.get_prod_price_forecast(df_input_data) - - with open(data_path, 'wb') as fid: - pickle.dump((fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt, df_input_data), fid, pickle.HIGHEST_PROTOCOL) - - template = 'presentation' - + + with open(data_path, "wb") as fid: + pickle.dump( + ( + fcst, + P_PV_forecast, + P_load_forecast, + df_input_data_dayahead, + opt, + df_input_data, + ), + fid, + pickle.HIGHEST_PROTOCOL, + ) + + template = "presentation" + # Let's plot the input data - fig_inputs1 = df_input_data[[str(retrieve_hass_conf['sensor_power_photovoltaics']), - str(retrieve_hass_conf['sensor_power_load_no_var_loads'] + '_positive')]].plot() + fig_inputs1 = df_input_data[ + [ + str(retrieve_hass_conf["sensor_power_photovoltaics"]), + str(retrieve_hass_conf["sensor_power_load_no_var_loads"] + "_positive"), + ] + ].plot() 
fig_inputs1.layout.template = template - fig_inputs1.update_yaxes(title_text = "Powers (W)") - fig_inputs1.update_xaxes(title_text = "Time") + fig_inputs1.update_yaxes(title_text="Powers (W)") + fig_inputs1.update_xaxes(title_text="Time") fig_inputs1.show() - - fig_inputs2 = df_input_data[['unit_load_cost', - 'unit_prod_price']].plot() + + fig_inputs2 = df_input_data[["unit_load_cost", "unit_prod_price"]].plot() fig_inputs2.layout.template = template - fig_inputs2.update_yaxes(title_text = "Load cost and production sell price (EUR)") - fig_inputs2.update_xaxes(title_text = "Time") + fig_inputs2.update_yaxes(title_text="Load cost and production sell price (EUR)") + fig_inputs2.update_xaxes(title_text="Time") fig_inputs2.show() - + fig_inputs_dah = df_input_data_dayahead.plot() fig_inputs_dah.layout.template = template - fig_inputs_dah.update_yaxes(title_text = "Powers (W)") - fig_inputs_dah.update_xaxes(title_text = "Time") + fig_inputs_dah.update_yaxes(title_text="Powers (W)") + fig_inputs_dah.update_xaxes(title_text="Time") fig_inputs_dah.show() - + # Perform a dayahead optimization - '''df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead) + """df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead) df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead) opt_res_dah = opt.perform_dayahead_forecast_optim(df_input_data_dayahead, P_PV_forecast, P_load_forecast) fig_res_dah = opt_res_dah[['P_deferrable0', 'P_deferrable1', 'P_grid']].plot() fig_res_dah.layout.template = template fig_res_dah.update_yaxes(title_text = "Powers (W)") fig_res_dah.update_xaxes(title_text = "Time") - fig_res_dah.show()''' - + fig_res_dah.show()""" + '''post_mpc_optim: "curl -i -H \"Content-Type: application/json\" -X POST -d '{ \"load_cost_forecast\":[17.836, 19.146, 18.753, 17.838, 17.277, 16.282, 16.736, 16.047, 17.004, 19.982, 17.17, 16.968, 16.556, 16.21, 12.333, 10.937], \"prod_price_forecast\":[6.651, 7.743, 7.415, 6.653, 6.185, 5.356, 5.734, 5.16, 5.958, 8.439, 6.096, 5.928, 5.584, 5.296, 4.495, 3.332], @@ -150,24 +324,33 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, \"pv_power_forecast\": [0, 8, 27, 42, 47, 41, 25, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 52, 73, 74, 68, 44, 12, 0, 0, 0, 0], \"alpha\": 1, \"beta\": 0, \"soc_init\":0.98, \"soc_final\":0.3, \"operating_hours_of_each_deferrable_load\":[0] }' http://localhost:5000/action/naive-mpc-optim"''' - + # Perform a MPC optimization - df_input_data_dayahead['unit_load_cost'] = load_cost_forecast - df_input_data_dayahead.loc[df_input_data_dayahead.index[2]:df_input_data_dayahead.index[6],'unit_load_cost'] = 150 - df_input_data_dayahead['unit_prod_price'] = prod_price_forecast - - opt.optim_conf['weight_battery_discharge'] = 0.0 - opt.optim_conf['weight_battery_charge'] = 0.0 - opt.optim_conf['battery_dynamic_max'] = 0.9 - opt.optim_conf['set_nocharge_from_grid'] = False - opt.optim_conf['set_nodischarge_to_grid'] = False - opt.optim_conf['set_total_pv_sell'] = False - + df_input_data_dayahead["unit_load_cost"] = load_cost_forecast + df_input_data_dayahead.loc[ + df_input_data_dayahead.index[2] : df_input_data_dayahead.index[6], + "unit_load_cost", + ] = 150 + df_input_data_dayahead["unit_prod_price"] = prod_price_forecast + + opt.optim_conf["weight_battery_discharge"] = 0.0 + opt.optim_conf["weight_battery_charge"] = 0.0 + opt.optim_conf["battery_dynamic_max"] = 0.9 + opt.optim_conf["set_nocharge_from_grid"] = False + 
opt.optim_conf["set_nodischarge_to_grid"] = False + opt.optim_conf["set_total_pv_sell"] = False + opt_res_dayahead = opt.perform_naive_mpc_optim( - df_input_data_dayahead, P_PV_forecast, P_load_forecast, prediction_horizon, - soc_init=soc_init, soc_final=soc_final, def_total_hours=operating_hours_of_each_deferrable_load) - fig_res_mpc = opt_res_dayahead[['P_batt', 'P_grid']].plot() + df_input_data_dayahead, + P_PV_forecast, + P_load_forecast, + prediction_horizon, + soc_init=soc_init, + soc_final=soc_final, + def_total_hours=operating_hours_of_each_deferrable_load, + ) + fig_res_mpc = opt_res_dayahead[["P_batt", "P_grid"]].plot() fig_res_mpc.layout.template = template - fig_res_mpc.update_yaxes(title_text = "Powers (W)") - fig_res_mpc.update_xaxes(title_text = "Time") - fig_res_mpc.show() \ No newline at end of file + fig_res_mpc.update_yaxes(title_text="Powers (W)") + fig_res_mpc.update_xaxes(title_text="Time") + fig_res_mpc.show() diff --git a/scripts/use_cases_analysis.py b/scripts/use_cases_analysis.py index 9ae5234c..4be8dc28 100644 --- a/scripts/use_cases_analysis.py +++ b/scripts/use_cases_analysis.py @@ -1,189 +1,282 @@ # -*- coding: utf-8 -*- -''' - This is a script for analysis plot. - To use this script you will need plotly and kaleido. Install them using: - pip install plotly - pip install kaleido - Before running this script you should perform a perfect optimization for each type of cost function: - profit, cost and self-consumption -''' -import json -import numpy as np -import pandas as pd +""" +This is a script for analysis plot. +To use this script you will need plotly and kaleido. Install them using: + pip install plotly + pip install kaleido +Before running this script you should perform a perfect optimization for each type of cost function: +profit, cost and self-consumption +""" + import pathlib -import plotly.express as px -import plotly.subplots as sp + +import pandas as pd import plotly.io as pio -pio.renderers.default = 'browser' + +pio.renderers.default = "browser" pd.options.plotting.backend = "plotly" -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger, build_config, build_secrets, build_params +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + build_secrets, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) # the root folder root = pathlib.Path(str(get_root(__file__, num_parent=2))) emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['docs_path'] = root / 'docs/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['secrets_path'] = root / 'secrets_emhass.yaml' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["docs_path"] = root / "docs/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["secrets_path"] = root / "secrets_emhass.yaml" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, 
save_to_file=False) -def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file): - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - df_weather = fcst.get_weather_forecast(method='solar.forecast') + +def get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file +): + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + df_weather = fcst.get_weather_forecast(method="solar.forecast") P_PV_forecast = fcst.get_power_from_weather(df_weather) - P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method']) + P_load_forecast = fcst.get_load_forecast(method=optim_conf["load_forecast_method"]) df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1) - df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - 'profit', emhass_conf, logger) + df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + "profit", + emhass_conf, + logger, + ) return fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt -if __name__ == '__main__': + +if __name__ == "__main__": get_data_from_file = False params = None save_figures = False # Build params with default config and secrets file - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = build_secrets(emhass_conf,logger,secrets_path=emhass_conf['secrets_path'],no_response=True) - params = build_params(emhass_conf,secrets,config,logger) + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + _, secrets = build_secrets( + emhass_conf, logger, secrets_path=emhass_conf["secrets_path"], no_response=True + ) + params = build_params(emhass_conf, secrets, config, logger) retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(params, logger) - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) - days_list = get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) + days_list = get_days_list(retrieve_hass_conf["historic_days_to_retrieve"]) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, var_list, minimal_response=False, significant_changes_only=False + ) + rh.prepare_data( + 
retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() - fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = \ - get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file) + fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = ( + get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file + ) + ) df_input_data = fcst.get_load_cost_forecast(df_input_data) df_input_data = fcst.get_prod_price_forecast(df_input_data) - - template = 'presentation' - + + template = "presentation" + # Let's plot the input data - fig_inputs1 = df_input_data[[str(retrieve_hass_conf['sensor_power_photovoltaics']), - str(retrieve_hass_conf['sensor_power_load_no_var_loads'] + '_positive')]].plot() + fig_inputs1 = df_input_data[ + [ + str(retrieve_hass_conf["sensor_power_photovoltaics"]), + str(retrieve_hass_conf["sensor_power_load_no_var_loads"] + "_positive"), + ] + ].plot() fig_inputs1.layout.template = template - fig_inputs1.update_yaxes(title_text = "Powers (W)") - fig_inputs1.update_xaxes(title_text = "Time") + fig_inputs1.update_yaxes(title_text="Powers (W)") + fig_inputs1.update_xaxes(title_text="Time") fig_inputs1.show() if save_figures: - fig_inputs1.write_image(emhass_conf['docs_path'] / "images/inputs_power.svg", - width=1080, height=0.8*1080) - - fig_inputs2 = df_input_data[['unit_load_cost', - 'unit_prod_price']].plot() + fig_inputs1.write_image( + emhass_conf["docs_path"] / "images/inputs_power.svg", + width=1080, + height=0.8 * 1080, + ) + + fig_inputs2 = df_input_data[["unit_load_cost", "unit_prod_price"]].plot() fig_inputs2.layout.template = template - fig_inputs2.update_yaxes(title_text = "Load cost and production sell price (EUR)") - fig_inputs2.update_xaxes(title_text = "Time") + fig_inputs2.update_yaxes(title_text="Load cost and production sell price (EUR)") + fig_inputs2.update_xaxes(title_text="Time") fig_inputs2.show() if save_figures: - fig_inputs2.write_image(emhass_conf['docs_path'] / "images/inputs_cost_price.svg", - width=1080, height=0.8*1080) - + fig_inputs2.write_image( + emhass_conf["docs_path"] / "images/inputs_cost_price.svg", + width=1080, + height=0.8 * 1080, + ) + fig_inputs_dah = df_input_data_dayahead.plot() fig_inputs_dah.layout.template = template - fig_inputs_dah.update_yaxes(title_text = "Powers (W)") - fig_inputs_dah.update_xaxes(title_text = "Time") + fig_inputs_dah.update_yaxes(title_text="Powers (W)") + fig_inputs_dah.update_xaxes(title_text="Time") fig_inputs_dah.show() if save_figures: - fig_inputs_dah.write_image(emhass_conf['docs_path'] / "images/inputs_dayahead.svg", - width=1080, height=0.8*1080) - + fig_inputs_dah.write_image( + emhass_conf["docs_path"] / "images/inputs_dayahead.svg", + width=1080, + height=0.8 * 1080, + ) + # Let's first perform a perfect optimization opt_res = opt.perform_perfect_forecast_optim(df_input_data, days_list) - fig_res = opt_res[['P_deferrable0', 'P_deferrable1', 'P_grid']].plot() + fig_res = opt_res[["P_deferrable0", "P_deferrable1", "P_grid"]].plot() fig_res.layout.template = template - fig_res.update_yaxes(title_text = "Powers (W)") - fig_res.update_xaxes(title_text = "Time") + fig_res.update_yaxes(title_text="Powers (W)") + 
fig_res.update_xaxes(title_text="Time") fig_res.show() if save_figures: - fig_res.write_image(emhass_conf['docs_path'] / "images/optim_results_PV_defLoads_perfectOptim.svg", - width=1080, height=0.8*1080) - - print("System with: PV, two deferrable loads, perfect optimization, profit >> total cost function sum: "+\ - str(opt_res['cost_profit'].sum())) - + fig_res.write_image( + emhass_conf["docs_path"] + / "images/optim_results_PV_defLoads_perfectOptim.svg", + width=1080, + height=0.8 * 1080, + ) + + print( + "System with: PV, two deferrable loads, perfect optimization, profit >> total cost function sum: " + + str(opt_res["cost_profit"].sum()) + ) + # And then perform a dayahead optimization df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead) df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead) - opt_res_dah = opt.perform_dayahead_forecast_optim(df_input_data_dayahead, P_PV_forecast, P_load_forecast) - fig_res_dah = opt_res_dah[['P_deferrable0', 'P_deferrable1', 'P_grid']].plot() + opt_res_dah = opt.perform_dayahead_forecast_optim( + df_input_data_dayahead, P_PV_forecast, P_load_forecast + ) + fig_res_dah = opt_res_dah[["P_deferrable0", "P_deferrable1", "P_grid"]].plot() fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") fig_res_dah.show() if save_figures: - fig_res_dah.write_image(emhass_conf['docs_path'] / "images/optim_results_PV_defLoads_dayaheadOptim.svg", - width=1080, height=0.8*1080) - - print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dah['cost_profit'].sum())) - + fig_res_dah.write_image( + emhass_conf["docs_path"] + / "images/optim_results_PV_defLoads_dayaheadOptim.svg", + width=1080, + height=0.8 * 1080, + ) + + print( + "System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dah["cost_profit"].sum()) + ) + # Let's simplify to a system with only two deferrable loads, no PV installation - retrieve_hass_conf['solar_forecast_kwp'] = 0 - fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = \ - get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file) + retrieve_hass_conf["solar_forecast_kwp"] = 0 + fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = ( + get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file + ) + ) df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead) df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead) - opt_res_dah = opt.perform_dayahead_forecast_optim(df_input_data_dayahead, P_PV_forecast, P_load_forecast) - fig_res_dah = opt_res_dah[['P_deferrable0', 'P_deferrable1', 'P_grid']].plot() + opt_res_dah = opt.perform_dayahead_forecast_optim( + df_input_data_dayahead, P_PV_forecast, P_load_forecast + ) + fig_res_dah = opt_res_dah[["P_deferrable0", "P_deferrable1", "P_grid"]].plot() fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") fig_res_dah.show() if save_figures: - fig_res_dah.write_image(emhass_conf['docs_path'] / 
"images/optim_results_defLoads_dayaheadOptim.svg", - width=1080, height=0.8*1080) - - print("System with: two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dah['cost_profit'].sum())) - + fig_res_dah.write_image( + emhass_conf["docs_path"] + / "images/optim_results_defLoads_dayaheadOptim.svg", + width=1080, + height=0.8 * 1080, + ) + + print( + "System with: two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dah["cost_profit"].sum()) + ) + # Now a complete system with PV, Battery and two deferrable loads - retrieve_hass_conf['solar_forecast_kwp'] = 5 - optim_conf['set_use_battery'] = True - fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = \ - get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf, - params, get_data_from_file) + retrieve_hass_conf["solar_forecast_kwp"] = 5 + optim_conf["set_use_battery"] = True + fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = ( + get_forecast_optim_objects( + retrieve_hass_conf, optim_conf, plant_conf, params, get_data_from_file + ) + ) df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead) df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead) - opt_res_dah = opt.perform_dayahead_forecast_optim(df_input_data_dayahead, P_PV_forecast, P_load_forecast) - fig_res_dah = opt_res_dah[['P_deferrable0', 'P_deferrable1', 'P_grid', 'P_batt']].plot() + opt_res_dah = opt.perform_dayahead_forecast_optim( + df_input_data_dayahead, P_PV_forecast, P_load_forecast + ) + fig_res_dah = opt_res_dah[ + ["P_deferrable0", "P_deferrable1", "P_grid", "P_batt"] + ].plot() fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Powers (W)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Powers (W)") + fig_res_dah.update_xaxes(title_text="Time") fig_res_dah.show() if save_figures: - fig_res_dah.write_image(emhass_conf['docs_path'] / "images/optim_results_PV_Batt_defLoads_dayaheadOptim.svg", - width=1080, height=0.8*1080) - fig_res_dah = opt_res_dah[['SOC_opt']].plot() + fig_res_dah.write_image( + emhass_conf["docs_path"] + / "images/optim_results_PV_Batt_defLoads_dayaheadOptim.svg", + width=1080, + height=0.8 * 1080, + ) + fig_res_dah = opt_res_dah[["SOC_opt"]].plot() fig_res_dah.layout.template = template - fig_res_dah.update_yaxes(title_text = "Battery State of Charge (%)") - fig_res_dah.update_xaxes(title_text = "Time") + fig_res_dah.update_yaxes(title_text="Battery State of Charge (%)") + fig_res_dah.update_xaxes(title_text="Time") fig_res_dah.show() if save_figures: - fig_res_dah.write_image(emhass_conf['docs_path'] / "images/optim_results_PV_Batt_defLoads_dayaheadOptim_SOC.svg", - width=1080, height=0.8*1080) - - print("System with: PV, Battery, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\ - str(opt_res_dah['cost_profit'].sum())) \ No newline at end of file + fig_res_dah.write_image( + emhass_conf["docs_path"] + / "images/optim_results_PV_Batt_defLoads_dayaheadOptim_SOC.svg", + width=1080, + height=0.8 * 1080, + ) + + print( + "System with: PV, Battery, two deferrable loads, dayahead optimization, profit >> total cost function sum: " + + str(opt_res_dah["cost_profit"].sum()) + ) diff --git a/setup.py b/setup.py index 1bf978be..5dfbadaf 100644 --- a/setup.py +++ b/setup.py @@ -12,57 +12,75 @@ here = pathlib.Path(__file__).parent.resolve() # Get the long description from the README file 
-long_description = (here / 'README.md').read_text(encoding='utf-8') +long_description = (here / "README.md").read_text(encoding="utf-8") # Arguments marked as "Required" below must be included for upload to PyPI. # Fields marked as "Optional" may be commented out. setup( - name='emhass', # Required - version='0.11.2', # Required - description='An Energy Management System for Home Assistant', # Optional + name="emhass", # Required + version="0.11.2", # Required + description="An Energy Management System for Home Assistant", # Optional long_description=long_description, # Optional - long_description_content_type='text/markdown', # Optional (see note above) - url='https://github.com/davidusb-geek/emhass', # Optional - author='David HERNANDEZ', # Optional - author_email='davidusb@gmail.com', # Optional + long_description_content_type="text/markdown", # Optional (see note above) + url="https://github.com/davidusb-geek/emhass", # Optional + author="David HERNANDEZ", # Optional + author_email="davidusb@gmail.com", # Optional classifiers=[ # Optional - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 3.11', + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Topic :: Software Development :: Build Tools", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", ], - keywords='energy, management, optimization, hass', # Optional - package_dir={'': 'src'}, # Optional - packages=find_packages(where='src'), # Required - python_requires='>=3.10, <3.12', + keywords="energy, management, optimization, hass", # Optional + package_dir={"": "src"}, # Optional + packages=find_packages(where="src"), # Required + python_requires=">=3.10, <3.12", install_requires=[ - 'wheel', - 'numpy==1.26.4', - 'scipy==1.12.0', - 'pandas<=2.0.3', - 'pvlib>=0.10.2', - 'protobuf>=3.0.0', - 'pytz>=2021.1', - 'requests>=2.25.1', - 'beautifulsoup4>=4.9.3', - 'h5py==3.12.1', - 'pulp>=2.4', - 'pyyaml>=5.4.1', - 'tables<=3.9.1', - 'skforecast==0.14.0', - 'flask>=2.0.3', - 'waitress>=2.1.1', - 'plotly>=5.6.0' + "wheel", + "numpy==1.26.4", + "scipy==1.12.0", + "pandas<=2.0.3", + "pvlib>=0.10.2", + "protobuf>=3.0.0", + "pytz>=2021.1", + "requests>=2.25.1", + "beautifulsoup4>=4.9.3", + "h5py==3.12.1", + "pulp>=2.4", + "pyyaml>=5.4.1", + "tables<=3.9.1", + "skforecast==0.14.0", + "flask>=2.0.3", + "waitress>=2.1.1", + "plotly>=5.6.0", ], # Optional entry_points={ # Optional - 'console_scripts': [ - 'emhass=emhass.command_line:main', + "console_scripts": [ + "emhass=emhass.command_line:main", ], }, - package_data={'emhass': ['templates/index.html','templates/template.html','templates/configuration.html','static/advanced.html','static/basic.html', 'static/script.js', 'static/configuration_script.js', - 'static/style.css','static/configuration_list.html','static/img/emhass_icon.png','static/img/emhass_logo_short.svg', 'static/img/feather-sprite.svg','static/data/param_definitions.json', - 'data/cec_modules.pbz2', 'data/cec_inverters.pbz2','data/associations.csv','data/config_defaults.json']}, + package_data={ + "emhass": [ + "templates/index.html", + "templates/template.html", + "templates/configuration.html", + "static/advanced.html", + "static/basic.html", + "static/script.js", + "static/configuration_script.js", + "static/style.css", + "static/configuration_list.html", + 
"static/img/emhass_icon.png", + "static/img/emhass_logo_short.svg", + "static/img/feather-sprite.svg", + "static/data/param_definitions.json", + "data/cec_modules.pbz2", + "data/cec_inverters.pbz2", + "data/associations.csv", + "data/config_defaults.json", + ] + }, ) diff --git a/src/emhass/command_line.py b/src/emhass/command_line.py index a3ed0430..51bf9a6b 100644 --- a/src/emhass/command_line.py +++ b/src/emhass/command_line.py @@ -2,33 +2,39 @@ # -*- coding: utf-8 -*- import argparse +import copy +import json +import logging import os -import re -import time import pathlib -import logging -import json -import copy import pickle +import re +import time from datetime import datetime, timezone -from typing import Optional, Tuple +from distutils.util import strtobool from importlib.metadata import version +from typing import Optional, Tuple + import numpy as np import pandas as pd -from distutils.util import strtobool - -from emhass.retrieve_hass import RetrieveHass +from emhass import utils from emhass.forecast import Forecast from emhass.machine_learning_forecaster import MLForecaster -from emhass.optimization import Optimization from emhass.machine_learning_regressor import MLRegressor -from emhass import utils +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass -def set_input_data_dict(emhass_conf: dict, costfun: str, - params: str, runtimeparams: str, set_type: str, logger: logging.Logger, - get_data_from_file: Optional[bool] = False) -> dict: +def set_input_data_dict( + emhass_conf: dict, + costfun: str, + params: str, + runtimeparams: str, + set_type: str, + logger: logging.Logger, + get_data_from_file: Optional[bool] = False, +) -> dict: """ Set up some of the data needed for the different actions. @@ -60,46 +66,88 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, params = {} # Parsing yaml - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger) if type(retrieve_hass_conf) is bool: return False - + # Treat runtimeparams params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) + runtimeparams, + params, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) # Define main objects - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf, - fcst.var_load_cost, fcst.var_prod_price, - costfun, emhass_conf, logger) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + opt = Optimization( + retrieve_hass_conf, + optim_conf, + plant_conf, + fcst.var_load_cost, + fcst.var_prod_price, + costfun, + emhass_conf, + logger, + ) # Perform setup based on type of action if 
set_type == "perfect-optim": # Retrieve data from hass if get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [ - retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [ - retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] else: days_list = utils.get_days_list( - retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], - retrieve_hass_conf['sensor_power_photovoltaics']] - if not rh.get_data(days_list, var_list, minimal_response=False, significant_changes_only=False): + retrieve_hass_conf["historic_days_to_retrieve"] + ) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + if not rh.get_data( + days_list, + var_list, + minimal_response=False, + significant_changes_only=False, + ): return False - if not rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], - load_negative=retrieve_hass_conf['load_negative'], - set_zero_min=retrieve_hass_conf['set_zero_min'], - var_replace_zero=retrieve_hass_conf['sensor_replace_zero'], - var_interp=retrieve_hass_conf['sensor_linear_interp']): + if not rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ): return False df_input_data = rh.df_final.copy() # What we don't need for this type of action @@ -107,101 +155,162 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, elif set_type == "dayahead-optim": # Get PV and load forecasts df_weather = fcst.get_weather_forecast( - method=optim_conf['weather_forecast_method']) + method=optim_conf["weather_forecast_method"] + ) if isinstance(df_weather, bool) and not df_weather: return False P_PV_forecast = fcst.get_power_from_weather(df_weather) P_load_forecast = fcst.get_load_forecast( - method=optim_conf['load_forecast_method']) + method=optim_conf["load_forecast_method"] + ) if isinstance(P_load_forecast, bool) and not P_load_forecast: logger.error( - "Unable to get sensor power photovoltaics, or sensor power load no var loads. Check HA sensors and their daily data") + "Unable to get sensor power photovoltaics, or sensor power load no var loads. 
Check HA sensors and their daily data" + ) return False - df_input_data_dayahead = pd.DataFrame(np.transpose(np.vstack( - [P_PV_forecast.values, P_load_forecast.values])), index=P_PV_forecast.index, - columns=["P_PV_forecast", "P_load_forecast"]) - if "optimization_time_step" in retrieve_hass_conf and retrieve_hass_conf["optimization_time_step"]: - if not isinstance(retrieve_hass_conf["optimization_time_step"], pd._libs.tslibs.timedeltas.Timedelta): - optimization_time_step = pd.to_timedelta(retrieve_hass_conf["optimization_time_step"], "minute") + df_input_data_dayahead = pd.DataFrame( + np.transpose(np.vstack([P_PV_forecast.values, P_load_forecast.values])), + index=P_PV_forecast.index, + columns=["P_PV_forecast", "P_load_forecast"], + ) + if ( + "optimization_time_step" in retrieve_hass_conf + and retrieve_hass_conf["optimization_time_step"] + ): + if not isinstance( + retrieve_hass_conf["optimization_time_step"], + pd._libs.tslibs.timedeltas.Timedelta, + ): + optimization_time_step = pd.to_timedelta( + retrieve_hass_conf["optimization_time_step"], "minute" + ) else: optimization_time_step = retrieve_hass_conf["optimization_time_step"] - df_input_data_dayahead = df_input_data_dayahead.asfreq(optimization_time_step) + df_input_data_dayahead = df_input_data_dayahead.asfreq( + optimization_time_step + ) else: df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead) params = json.loads(params) - if ("prediction_horizon" in params["passed_data"] and params["passed_data"]["prediction_horizon"] is not None): + if ( + "prediction_horizon" in params["passed_data"] + and params["passed_data"]["prediction_horizon"] is not None + ): prediction_horizon = params["passed_data"]["prediction_horizon"] df_input_data_dayahead = copy.deepcopy(df_input_data_dayahead)[ - df_input_data_dayahead.index[0]: df_input_data_dayahead.index[prediction_horizon - 1]] + df_input_data_dayahead.index[0] : df_input_data_dayahead.index[ + prediction_horizon - 1 + ] + ] # What we don't need for this type of action df_input_data, days_list = None, None elif set_type == "naive-mpc-optim": # Retrieve data from hass if get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [ - retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [ - retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] else: days_list = utils.get_days_list(1) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], - retrieve_hass_conf['sensor_power_photovoltaics']] - if not rh.get_data(days_list, var_list, minimal_response=False, significant_changes_only=False): + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + if 
not rh.get_data( + days_list, + var_list, + minimal_response=False, + significant_changes_only=False, + ): return False - if not rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], - load_negative=retrieve_hass_conf['load_negative'], - set_zero_min=retrieve_hass_conf['set_zero_min'], - var_replace_zero=retrieve_hass_conf['sensor_replace_zero'], - var_interp=retrieve_hass_conf['sensor_linear_interp']): + if not rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ): return False df_input_data = rh.df_final.copy() # Get PV and load forecasts df_weather = fcst.get_weather_forecast( - method=optim_conf['weather_forecast_method']) + method=optim_conf["weather_forecast_method"] + ) if isinstance(df_weather, bool) and not df_weather: return False P_PV_forecast = fcst.get_power_from_weather( - df_weather, set_mix_forecast=True, df_now=df_input_data) + df_weather, set_mix_forecast=True, df_now=df_input_data + ) P_load_forecast = fcst.get_load_forecast( - method=optim_conf['load_forecast_method'], set_mix_forecast=True, df_now=df_input_data) + method=optim_conf["load_forecast_method"], + set_mix_forecast=True, + df_now=df_input_data, + ) if isinstance(P_load_forecast, bool) and not P_load_forecast: logger.error( - "Unable to get sensor power photovoltaics, or sensor power load no var loads. Check HA sensors and their daily data") + "Unable to get sensor power photovoltaics, or sensor power load no var loads. Check HA sensors and their daily data" + ) return False df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1) - if "optimization_time_step" in retrieve_hass_conf and retrieve_hass_conf["optimization_time_step"]: - if not isinstance(retrieve_hass_conf["optimization_time_step"], pd._libs.tslibs.timedeltas.Timedelta): - optimization_time_step = pd.to_timedelta(retrieve_hass_conf["optimization_time_step"], "minute") + if ( + "optimization_time_step" in retrieve_hass_conf + and retrieve_hass_conf["optimization_time_step"] + ): + if not isinstance( + retrieve_hass_conf["optimization_time_step"], + pd._libs.tslibs.timedeltas.Timedelta, + ): + optimization_time_step = pd.to_timedelta( + retrieve_hass_conf["optimization_time_step"], "minute" + ) else: optimization_time_step = retrieve_hass_conf["optimization_time_step"] - df_input_data_dayahead = df_input_data_dayahead.asfreq(optimization_time_step) + df_input_data_dayahead = df_input_data_dayahead.asfreq( + optimization_time_step + ) else: df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead) df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] params = json.loads(params) - if ("prediction_horizon" in params["passed_data"] and params["passed_data"]["prediction_horizon"] is not None): + if ( + "prediction_horizon" in params["passed_data"] + and params["passed_data"]["prediction_horizon"] is not None + ): prediction_horizon = params["passed_data"]["prediction_horizon"] df_input_data_dayahead = copy.deepcopy(df_input_data_dayahead)[ - df_input_data_dayahead.index[0]: df_input_data_dayahead.index[prediction_horizon - 1]] - elif (set_type == "forecast-model-fit" or set_type == "forecast-model-predict" or set_type == "forecast-model-tune"): + df_input_data_dayahead.index[0] : df_input_data_dayahead.index[ + prediction_horizon - 1 + ] + ] + 
elif ( + set_type == "forecast-model-fit" + or set_type == "forecast-model-predict" + or set_type == "forecast-model-tune" + ): df_input_data_dayahead = None P_PV_forecast, P_load_forecast = None, None params = json.loads(params) # Retrieve data from hass - days_to_retrieve = params["passed_data"]['historic_days_to_retrieve'] + days_to_retrieve = params["passed_data"]["historic_days_to_retrieve"] model_type = params["passed_data"]["model_type"] var_model = params["passed_data"]["var_model"] if get_data_from_file: days_list = None - filename = 'data_train_'+model_type+'.pkl' - filename_path = emhass_conf['data_path'] / filename - with open(filename_path, 'rb') as inp: + filename = "data_train_" + model_type + ".pkl" + filename_path = emhass_conf["data_path"] / filename + with open(filename_path, "rb") as inp: df_input_data, _ = pickle.load(inp) - df_input_data = df_input_data[df_input_data.index[-1] - pd.offsets.Day(days_to_retrieve):] + df_input_data = df_input_data[ + df_input_data.index[-1] - pd.offsets.Day(days_to_retrieve) : + ] else: days_list = utils.get_days_list(days_to_retrieve) var_list = [var_model] @@ -229,8 +338,12 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, if filename_path.is_file(): df_input_data = pd.read_csv(filename_path, parse_dates=True) else: - logger.error("The CSV file " + csv_file + - " was not found in path: " + str(emhass_conf["data_path"])) + logger.error( + "The CSV file " + + csv_file + + " was not found in path: " + + str(emhass_conf["data_path"]) + ) return False # raise ValueError("The CSV file " + csv_file + " was not found.") required_columns = [] @@ -239,8 +352,7 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, if timestamp is not None: required_columns.append(timestamp) if not set(required_columns).issubset(df_input_data.columns): - logger.error( - "The cvs file does not contain the required columns.") + logger.error("The cvs file does not contain the required columns.") msg = f"CSV file should contain the following columns: {', '.join(required_columns)}" logger.error(msg) return False @@ -257,23 +369,25 @@ def set_input_data_dict(emhass_conf: dict, costfun: str, days_list = None # The input data dictionary to return input_data_dict = { - 'emhass_conf': emhass_conf, - 'retrieve_hass_conf': retrieve_hass_conf, - 'rh': rh, - 'opt': opt, - 'fcst': fcst, - 'df_input_data': df_input_data, - 'df_input_data_dayahead': df_input_data_dayahead, - 'P_PV_forecast': P_PV_forecast, - 'P_load_forecast': P_load_forecast, - 'costfun': costfun, - 'params': params, - 'days_list': days_list + "emhass_conf": emhass_conf, + "retrieve_hass_conf": retrieve_hass_conf, + "rh": rh, + "opt": opt, + "fcst": fcst, + "df_input_data": df_input_data, + "df_input_data_dayahead": df_input_data_dayahead, + "P_PV_forecast": P_PV_forecast, + "P_load_forecast": P_load_forecast, + "costfun": costfun, + "params": params, + "days_list": days_list, } return input_data_dict -def weather_forecast_cache(emhass_conf: dict, params: str, - runtimeparams: str, logger: logging.Logger) -> bool: + +def weather_forecast_cache( + emhass_conf: dict, params: str, runtimeparams: str, logger: logging.Logger +) -> bool: """ Perform a call to get forecast function, intend to save results to cache. 
@@ -289,14 +403,22 @@ def weather_forecast_cache(emhass_conf: dict, params: str, :rtype: bool """ - + # Parsing yaml retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params, logger) - + # Treat runtimeparams params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams, params, retrieve_hass_conf, optim_conf, plant_conf, "forecast", logger) - + runtimeparams, + params, + retrieve_hass_conf, + optim_conf, + plant_conf, + "forecast", + logger, + emhass_conf, + ) + # Make sure weather_forecast_cache is true if (params != None) and (params != "null"): params = json.loads(params) @@ -306,18 +428,23 @@ def weather_forecast_cache(emhass_conf: dict, params: str, params = json.dumps(params) # Create Forecast object - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger) + fcst = Forecast( + retrieve_hass_conf, optim_conf, plant_conf, params, emhass_conf, logger + ) - result = fcst.get_weather_forecast(optim_conf['weather_forecast_method']) + result = fcst.get_weather_forecast(optim_conf["weather_forecast_method"]) if isinstance(result, bool) and not result: return False return True -def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger, - save_data_to_file: Optional[bool] = True, - debug: Optional[bool] = False) -> pd.DataFrame: + +def perfect_forecast_optim( + input_data_dict: dict, + logger: logging.Logger, + save_data_to_file: Optional[bool] = True, + debug: Optional[bool] = False, +) -> pd.DataFrame: """ Perform a call to the perfect forecast optimization routine. @@ -335,43 +462,54 @@ def perfect_forecast_optim(input_data_dict: dict, logger: logging.Logger, """ logger.info("Performing perfect forecast optimization") # Load cost and prod price forecast - df_input_data = input_data_dict['fcst'].get_load_cost_forecast( - input_data_dict['df_input_data'], - method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method'], - list_and_perfect=True) + df_input_data = input_data_dict["fcst"].get_load_cost_forecast( + input_data_dict["df_input_data"], + method=input_data_dict["fcst"].optim_conf["load_cost_forecast_method"], + list_and_perfect=True, + ) if isinstance(df_input_data, bool) and not df_input_data: return False - df_input_data = input_data_dict['fcst'].get_prod_price_forecast( - df_input_data, method=input_data_dict['fcst'].optim_conf['production_price_forecast_method'], - list_and_perfect=True) + df_input_data = input_data_dict["fcst"].get_prod_price_forecast( + df_input_data, + method=input_data_dict["fcst"].optim_conf["production_price_forecast_method"], + list_and_perfect=True, + ) if isinstance(df_input_data, bool) and not df_input_data: return False - opt_res = input_data_dict['opt'].perform_perfect_forecast_optim( - df_input_data, input_data_dict['days_list']) + opt_res = input_data_dict["opt"].perform_perfect_forecast_optim( + df_input_data, input_data_dict["days_list"] + ) # Save CSV file for analysis if save_data_to_file: - filename = "opt_res_perfect_optim_" + \ - input_data_dict["costfun"] + ".csv" + filename = "opt_res_perfect_optim_" + input_data_dict["costfun"] + ".csv" else: # Just save the latest optimization results filename = "opt_res_latest.csv" if not debug: opt_res.to_csv( - input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp') - if not isinstance(input_data_dict["params"],dict): + input_data_dict["emhass_conf"]["data_path"] / filename, + index_label="timestamp", + ) + if not isinstance(input_data_dict["params"], dict): params = 
json.loads(input_data_dict["params"]) else: params = input_data_dict["params"] # if continual_publish, save perfect results to data_path/entities json - if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False): - #Trigger the publish function, save entity data and not post to HA - publish_data(input_data_dict, logger, entity_save=True, dont_post=True) + if input_data_dict["retrieve_hass_conf"].get("continual_publish", False) or params[ + "passed_data" + ].get("entity_save", False): + # Trigger the publish function, save entity data and not post to HA + publish_data(input_data_dict, logger, entity_save=True, dont_post=True) return opt_res -def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger, - save_data_to_file: Optional[bool] = False, - debug: Optional[bool] = False) -> pd.DataFrame: + +def dayahead_forecast_optim( + input_data_dict: dict, + logger: logging.Logger, + save_data_to_file: Optional[bool] = False, + debug: Optional[bool] = False, +) -> pd.DataFrame: """ Perform a call to the day-ahead optimization routine. @@ -389,21 +527,27 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger, """ logger.info("Performing day-ahead forecast optimization") # Load cost and prod price forecast - df_input_data_dayahead = input_data_dict['fcst'].get_load_cost_forecast( - input_data_dict['df_input_data_dayahead'], - method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method']) + df_input_data_dayahead = input_data_dict["fcst"].get_load_cost_forecast( + input_data_dict["df_input_data_dayahead"], + method=input_data_dict["fcst"].optim_conf["load_cost_forecast_method"], + ) if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False - df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast( + df_input_data_dayahead = input_data_dict["fcst"].get_prod_price_forecast( df_input_data_dayahead, - method=input_data_dict['fcst'].optim_conf['production_price_forecast_method']) + method=input_data_dict["fcst"].optim_conf["production_price_forecast_method"], + ) if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False if "outdoor_temperature_forecast" in input_data_dict["params"]["passed_data"]: - df_input_data_dayahead["outdoor_temperature_forecast"] = \ - input_data_dict["params"]["passed_data"]["outdoor_temperature_forecast"] - opt_res_dayahead = input_data_dict['opt'].perform_dayahead_forecast_optim( - df_input_data_dayahead, input_data_dict['P_PV_forecast'], input_data_dict['P_load_forecast']) + df_input_data_dayahead["outdoor_temperature_forecast"] = input_data_dict[ + "params" + ]["passed_data"]["outdoor_temperature_forecast"] + opt_res_dayahead = input_data_dict["opt"].perform_dayahead_forecast_optim( + df_input_data_dayahead, + input_data_dict["P_PV_forecast"], + input_data_dict["P_load_forecast"], + ) # Save CSV file for publish_data if save_data_to_file: today = datetime.now(timezone.utc).replace( @@ -414,23 +558,31 @@ def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger, filename = "opt_res_latest.csv" if not debug: opt_res_dayahead.to_csv( - input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp') - - if not isinstance(input_data_dict["params"],dict): + input_data_dict["emhass_conf"]["data_path"] / filename, + index_label="timestamp", + ) + + if not isinstance(input_data_dict["params"], dict): params = json.loads(input_data_dict["params"]) else: params = 
input_data_dict["params"] - + # if continual_publish, save day_ahead results to data_path/entities json - if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False): - #Trigger the publish function, save entity data and not post to HA - publish_data(input_data_dict, logger, entity_save=True, dont_post=True) - + if input_data_dict["retrieve_hass_conf"].get("continual_publish", False) or params[ + "passed_data" + ].get("entity_save", False): + # Trigger the publish function, save entity data and not post to HA + publish_data(input_data_dict, logger, entity_save=True, dont_post=True) + return opt_res_dayahead -def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger, - save_data_to_file: Optional[bool] = False, - debug: Optional[bool] = False) -> pd.DataFrame: + +def naive_mpc_optim( + input_data_dict: dict, + logger: logging.Logger, + save_data_to_file: Optional[bool] = False, + debug: Optional[bool] = False, +) -> pd.DataFrame: """ Perform a call to the naive Model Predictive Controller optimization routine. @@ -448,29 +600,46 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger, """ logger.info("Performing naive MPC optimization") # Load cost and prod price forecast - df_input_data_dayahead = input_data_dict['fcst'].get_load_cost_forecast( - input_data_dict['df_input_data_dayahead'], - method=input_data_dict['fcst'].optim_conf['load_cost_forecast_method']) + df_input_data_dayahead = input_data_dict["fcst"].get_load_cost_forecast( + input_data_dict["df_input_data_dayahead"], + method=input_data_dict["fcst"].optim_conf["load_cost_forecast_method"], + ) if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False - df_input_data_dayahead = input_data_dict['fcst'].get_prod_price_forecast( - df_input_data_dayahead, method=input_data_dict['fcst'].optim_conf['production_price_forecast_method']) + df_input_data_dayahead = input_data_dict["fcst"].get_prod_price_forecast( + df_input_data_dayahead, + method=input_data_dict["fcst"].optim_conf["production_price_forecast_method"], + ) if isinstance(df_input_data_dayahead, bool) and not df_input_data_dayahead: return False if "outdoor_temperature_forecast" in input_data_dict["params"]["passed_data"]: - df_input_data_dayahead["outdoor_temperature_forecast"] = \ - input_data_dict["params"]["passed_data"]["outdoor_temperature_forecast"] + df_input_data_dayahead["outdoor_temperature_forecast"] = input_data_dict[ + "params" + ]["passed_data"]["outdoor_temperature_forecast"] # The specifics params for the MPC at runtime prediction_horizon = input_data_dict["params"]["passed_data"]["prediction_horizon"] soc_init = input_data_dict["params"]["passed_data"]["soc_init"] soc_final = input_data_dict["params"]["passed_data"]["soc_final"] - def_total_hours = input_data_dict["params"]["passed_data"]['operating_hours_of_each_deferrable_load'] - def_start_timestep = input_data_dict["params"]["passed_data"]['start_timesteps_of_each_deferrable_load'] - def_end_timestep = input_data_dict["params"]["passed_data"]['end_timesteps_of_each_deferrable_load'] + def_total_hours = input_data_dict["params"]["optim_conf"][ + "operating_hours_of_each_deferrable_load" + ] + def_start_timestep = input_data_dict["params"]["optim_conf"][ + "start_timesteps_of_each_deferrable_load" + ] + def_end_timestep = input_data_dict["params"]["optim_conf"][ + "end_timesteps_of_each_deferrable_load" + ] opt_res_naive_mpc = input_data_dict["opt"].perform_naive_mpc_optim( - 
df_input_data_dayahead, input_data_dict["P_PV_forecast"], input_data_dict["P_load_forecast"], - prediction_horizon, soc_init, soc_final, def_total_hours, - def_start_timestep, def_end_timestep) + df_input_data_dayahead, + input_data_dict["P_PV_forecast"], + input_data_dict["P_load_forecast"], + prediction_horizon, + soc_init, + soc_final, + def_total_hours, + def_start_timestep, + def_end_timestep, + ) # Save CSV file for publish_data if save_data_to_file: today = datetime.now(timezone.utc).replace( @@ -481,22 +650,28 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger, filename = "opt_res_latest.csv" if not debug: opt_res_naive_mpc.to_csv( - input_data_dict['emhass_conf']['data_path'] / filename, index_label='timestamp') - - if not isinstance(input_data_dict["params"],dict): + input_data_dict["emhass_conf"]["data_path"] / filename, + index_label="timestamp", + ) + + if not isinstance(input_data_dict["params"], dict): params = json.loads(input_data_dict["params"]) else: params = input_data_dict["params"] # if continual_publish, save mpc results to data_path/entities json - if input_data_dict["retrieve_hass_conf"].get('continual_publish',False) or params["passed_data"].get("entity_save",False): - #Trigger the publish function, save entity data and not post to HA - publish_data(input_data_dict, logger, entity_save=True, dont_post=True) + if input_data_dict["retrieve_hass_conf"].get("continual_publish", False) or params[ + "passed_data" + ].get("entity_save", False): + # Trigger the publish function, save entity data and not post to HA + publish_data(input_data_dict, logger, entity_save=True, dont_post=True) return opt_res_naive_mpc -def forecast_model_fit(input_data_dict: dict, logger: logging.Logger, - debug: Optional[bool] = False) -> Tuple[pd.DataFrame, pd.DataFrame, MLForecaster]: + +def forecast_model_fit( + input_data_dict: dict, logger: logging.Logger, debug: Optional[bool] = False +) -> Tuple[pd.DataFrame, pd.DataFrame, MLForecaster]: """Perform a forecast model fit from training data retrieved from Home Assistant. 
:param input_data_dict: A dictionnary with multiple data used by the action functions @@ -508,32 +683,43 @@ def forecast_model_fit(input_data_dict: dict, logger: logging.Logger, :return: The DataFrame containing the forecast data results without and with backtest and the `mlforecaster` object :rtype: Tuple[pd.DataFrame, pd.DataFrame, mlforecaster] """ - data = copy.deepcopy(input_data_dict['df_input_data']) - model_type = input_data_dict['params']['passed_data']['model_type'] - var_model = input_data_dict['params']['passed_data']['var_model'] - sklearn_model = input_data_dict['params']['passed_data']['sklearn_model'] - num_lags = input_data_dict['params']['passed_data']['num_lags'] - split_date_delta = input_data_dict['params']['passed_data']['split_date_delta'] - perform_backtest = input_data_dict['params']['passed_data']['perform_backtest'] + data = copy.deepcopy(input_data_dict["df_input_data"]) + model_type = input_data_dict["params"]["passed_data"]["model_type"] + var_model = input_data_dict["params"]["passed_data"]["var_model"] + sklearn_model = input_data_dict["params"]["passed_data"]["sklearn_model"] + num_lags = input_data_dict["params"]["passed_data"]["num_lags"] + split_date_delta = input_data_dict["params"]["passed_data"]["split_date_delta"] + perform_backtest = input_data_dict["params"]["passed_data"]["perform_backtest"] # The ML forecaster object - mlf = MLForecaster(data, model_type, var_model, sklearn_model, - num_lags, input_data_dict['emhass_conf'], logger) + mlf = MLForecaster( + data, + model_type, + var_model, + sklearn_model, + num_lags, + input_data_dict["emhass_conf"], + logger, + ) # Fit the ML model df_pred, df_pred_backtest = mlf.fit( split_date_delta=split_date_delta, perform_backtest=perform_backtest ) # Save model if not debug: - filename = model_type+'_mlf.pkl' - filename_path = input_data_dict['emhass_conf']['data_path'] / filename - with open(filename_path, 'wb') as outp: + filename = model_type + "_mlf.pkl" + filename_path = input_data_dict["emhass_conf"]["data_path"] / filename + with open(filename_path, "wb") as outp: pickle.dump(mlf, outp, pickle.HIGHEST_PROTOCOL) return df_pred, df_pred_backtest, mlf -def forecast_model_predict(input_data_dict: dict, logger: logging.Logger, - use_last_window: Optional[bool] = True, - debug: Optional[bool] = False, mlf: Optional[MLForecaster] = None - ) -> pd.DataFrame: + +def forecast_model_predict( + input_data_dict: dict, + logger: logging.Logger, + use_last_window: Optional[bool] = True, + debug: Optional[bool] = False, + mlf: Optional[MLForecaster] = None, +) -> pd.DataFrame: r"""Perform a forecast model predict using a previously trained skforecast model. 
:param input_data_dict: A dictionnary with multiple data used by the action functions @@ -555,9 +741,9 @@ def forecast_model_predict(input_data_dict: dict, logger: logging.Logger, :rtype: pd.DataFrame """ # Load model - model_type = input_data_dict['params']['passed_data']['model_type'] - filename = model_type+'_mlf.pkl' - filename_path = input_data_dict['emhass_conf']['data_path'] / filename + model_type = input_data_dict["params"]["passed_data"]["model_type"] + filename = model_type + "_mlf.pkl" + filename_path = input_data_dict["emhass_conf"]["data_path"] / filename if not debug: if filename_path.is_file(): with open(filename_path, "rb") as inp: @@ -592,24 +778,41 @@ def forecast_model_predict(input_data_dict: dict, logger: logging.Logger, now_precise = datetime.now( input_data_dict["retrieve_hass_conf"]["time_zone"] ).replace(second=0, microsecond=0) - if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest": - idx_closest = predictions.index.get_indexer([now_precise], method="nearest")[0] - elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first": - idx_closest = predictions.index.get_indexer([now_precise], method="ffill")[0] - elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last": - idx_closest = predictions.index.get_indexer([now_precise], method="bfill")[0] + if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest": + idx_closest = predictions.index.get_indexer( + [now_precise], method="nearest" + )[0] + elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first": + idx_closest = predictions.index.get_indexer([now_precise], method="ffill")[ + 0 + ] + elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last": + idx_closest = predictions.index.get_indexer([now_precise], method="bfill")[ + 0 + ] if idx_closest == -1: - idx_closest = predictions.index.get_indexer([now_precise], method="nearest")[0] + idx_closest = predictions.index.get_indexer( + [now_precise], method="nearest" + )[0] # Publish Load forecast input_data_dict["rh"].post_data( - predictions, idx_closest, model_predict_entity_id, - model_predict_unit_of_measurement, model_predict_friendly_name, - type_var="mlforecaster", publish_prefix=publish_prefix) + predictions, + idx_closest, + model_predict_entity_id, + model_predict_unit_of_measurement, + model_predict_friendly_name, + type_var="mlforecaster", + publish_prefix=publish_prefix, + ) return predictions -def forecast_model_tune(input_data_dict: dict, logger: logging.Logger, - debug: Optional[bool] = False, mlf: Optional[MLForecaster] = None - ) -> Tuple[pd.DataFrame, MLForecaster]: + +def forecast_model_tune( + input_data_dict: dict, + logger: logging.Logger, + debug: Optional[bool] = False, + mlf: Optional[MLForecaster] = None, +) -> Tuple[pd.DataFrame, MLForecaster]: """Tune a forecast model hyperparameters using bayesian optimization. 
:param input_data_dict: A dictionnary with multiple data used by the action functions @@ -625,9 +828,9 @@ def forecast_model_tune(input_data_dict: dict, logger: logging.Logger, :rtype: pd.DataFrame """ # Load model - model_type = input_data_dict['params']['passed_data']['model_type'] - filename = model_type+'_mlf.pkl' - filename_path = input_data_dict['emhass_conf']['data_path'] / filename + model_type = input_data_dict["params"]["passed_data"]["model_type"] + filename = model_type + "_mlf.pkl" + filename_path = input_data_dict["emhass_conf"]["data_path"] / filename if not debug: if filename_path.is_file(): with open(filename_path, "rb") as inp: @@ -641,14 +844,16 @@ def forecast_model_tune(input_data_dict: dict, logger: logging.Logger, df_pred_optim = mlf.tune(debug=debug) # Save model if not debug: - filename = model_type+'_mlf.pkl' - filename_path = input_data_dict['emhass_conf']['data_path'] / filename - with open(filename_path, 'wb') as outp: + filename = model_type + "_mlf.pkl" + filename_path = input_data_dict["emhass_conf"]["data_path"] / filename + with open(filename_path, "wb") as outp: pickle.dump(mlf, outp, pickle.HIGHEST_PROTOCOL) return df_pred_optim, mlf -def regressor_model_fit(input_data_dict: dict, logger: logging.Logger, - debug: Optional[bool] = False) -> MLRegressor: + +def regressor_model_fit( + input_data_dict: dict, logger: logging.Logger, debug: Optional[bool] = False +) -> MLRegressor: """Perform a forecast model fit from training data retrieved from Home Assistant. :param input_data_dict: A dictionnary with multiple data used by the action functions @@ -690,7 +895,9 @@ def regressor_model_fit(input_data_dict: dict, logger: logging.Logger, logger.error("parameter: 'date_features' not passed") return False # The MLRegressor object - mlr = MLRegressor(data, model_type, regression_model, features, target, timestamp, logger) + mlr = MLRegressor( + data, model_type, regression_model, features, target, timestamp, logger + ) # Fit the ML model fit = mlr.fit(date_features=date_features) if not fit: @@ -703,9 +910,13 @@ def regressor_model_fit(input_data_dict: dict, logger: logging.Logger, pickle.dump(mlr, outp, pickle.HIGHEST_PROTOCOL) return mlr -def regressor_model_predict(input_data_dict: dict, logger: logging.Logger, - debug: Optional[bool] = False, mlr: Optional[MLRegressor] = None - ) -> np.ndarray: + +def regressor_model_predict( + input_data_dict: dict, + logger: logging.Logger, + debug: Optional[bool] = False, + mlr: Optional[MLRegressor] = None, +) -> np.ndarray: """Perform a prediction from csv file. 
:param input_data_dict: A dictionnary with multiple data used by the action functions @@ -739,24 +950,36 @@ def regressor_model_predict(input_data_dict: dict, logger: logging.Logger, # Predict from csv file prediction = mlr.predict(new_values) mlr_predict_entity_id = input_data_dict["params"]["passed_data"].get( - "mlr_predict_entity_id", "sensor.mlr_predict") + "mlr_predict_entity_id", "sensor.mlr_predict" + ) mlr_predict_unit_of_measurement = input_data_dict["params"]["passed_data"].get( - "mlr_predict_unit_of_measurement", "h") + "mlr_predict_unit_of_measurement", "h" + ) mlr_predict_friendly_name = input_data_dict["params"]["passed_data"].get( - "mlr_predict_friendly_name", "mlr predictor") + "mlr_predict_friendly_name", "mlr predictor" + ) # Publish prediction idx = 0 if not debug: - input_data_dict["rh"].post_data(prediction, idx, mlr_predict_entity_id, - mlr_predict_unit_of_measurement, mlr_predict_friendly_name, - type_var="mlregressor") + input_data_dict["rh"].post_data( + prediction, + idx, + mlr_predict_entity_id, + mlr_predict_unit_of_measurement, + mlr_predict_friendly_name, + type_var="mlregressor", + ) return prediction -def publish_data(input_data_dict: dict, logger: logging.Logger, - save_data_to_file: Optional[bool] = False, - opt_res_latest: Optional[pd.DataFrame] = None, - entity_save: Optional[bool] = False, - dont_post: Optional[bool] = False) -> pd.DataFrame: + +def publish_data( + input_data_dict: dict, + logger: logging.Logger, + save_data_to_file: Optional[bool] = False, + opt_res_latest: Optional[pd.DataFrame] = None, + entity_save: Optional[bool] = False, + dont_post: Optional[bool] = False, +) -> pd.DataFrame: """ Publish the data obtained from the optimization results. @@ -776,12 +999,11 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, """ logger.info("Publishing data to HASS instance") if input_data_dict: - if not isinstance(input_data_dict.get("params",{}),dict): + if not isinstance(input_data_dict.get("params", {}), dict): params = json.loads(input_data_dict["params"]) else: - params = input_data_dict.get("params",{}) + params = input_data_dict.get("params", {}) - # Check if a day ahead optimization has been performed (read CSV file) if save_data_to_file: today = datetime.now(timezone.utc).replace( @@ -789,65 +1011,80 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, ) filename = "opt_res_dayahead_" + today.strftime("%Y_%m_%d") + ".csv" # If publish_prefix is passed, check if there is saved entities in data_path/entities with prefix, publish to results - elif params["passed_data"].get("publish_prefix","") != "" and not dont_post: + elif params["passed_data"].get("publish_prefix", "") != "" and not dont_post: opt_res_list = [] opt_res_list_names = [] publish_prefix = params["passed_data"]["publish_prefix"] - entity_path = input_data_dict['emhass_conf']['data_path'] / "entities" + entity_path = input_data_dict["emhass_conf"]["data_path"] / "entities" # Check if items in entity_path if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0: # Obtain all files in entity_path - entity_path_contents = os.listdir(entity_path) + entity_path_contents = os.listdir(entity_path) # Confirm the entity path contains at least one file containing publish prefix or publish_prefix='all' - if any(publish_prefix in entity for entity in entity_path_contents) or publish_prefix == "all": + if ( + any(publish_prefix in entity for entity in entity_path_contents) + or publish_prefix == "all" + ): # Loop through all items in entity path for 
entity in entity_path_contents: - # If publish_prefix is "all" publish all saved entities to Home Assistant - # If publish_prefix matches the prefix from saved entities, publish to Home Assistant - if entity != "metadata.json" and (publish_prefix in entity or publish_prefix == "all"): - entity_data = publish_json(entity,input_data_dict,entity_path,logger) - if not isinstance(entity_data, bool): - opt_res_list.append(entity_data) - opt_res_list_names.append(entity.replace(".json", "")) - else: - return False + # If publish_prefix is "all" publish all saved entities to Home Assistant + # If publish_prefix matches the prefix from saved entities, publish to Home Assistant + if entity != "metadata.json" and ( + publish_prefix in entity or publish_prefix == "all" + ): + entity_data = publish_json( + entity, input_data_dict, entity_path, logger + ) + if not isinstance(entity_data, bool): + opt_res_list.append(entity_data) + opt_res_list_names.append(entity.replace(".json", "")) + else: + return False # Build a DataFrame with published entities opt_res = pd.concat(opt_res_list, axis=1) opt_res.columns = opt_res_list_names return opt_res else: - logger.warning("No saved entity json files that match prefix: " + str(publish_prefix)) + logger.warning( + "No saved entity json files that match prefix: " + + str(publish_prefix) + ) logger.warning("Falling back to opt_res_latest") else: - logger.warning("No saved entity json files in path:" + str(entity_path)) + logger.warning("No saved entity json files in path:" + str(entity_path)) logger.warning("Falling back to opt_res_latest") - filename = "opt_res_latest.csv" + filename = "opt_res_latest.csv" else: filename = "opt_res_latest.csv" if opt_res_latest is None: - if not os.path.isfile(input_data_dict['emhass_conf']['data_path'] / filename): - logger.error( - "File not found error, run an optimization task first.") + if not os.path.isfile(input_data_dict["emhass_conf"]["data_path"] / filename): + logger.error("File not found error, run an optimization task first.") return else: opt_res_latest = pd.read_csv( - input_data_dict['emhass_conf']['data_path'] / filename, index_col='timestamp') + input_data_dict["emhass_conf"]["data_path"] / filename, + index_col="timestamp", + ) opt_res_latest.index = pd.to_datetime(opt_res_latest.index) - opt_res_latest.index.freq = input_data_dict["retrieve_hass_conf"]['optimization_time_step'] + opt_res_latest.index.freq = input_data_dict["retrieve_hass_conf"][ + "optimization_time_step" + ] # Estimate the current index now_precise = datetime.now( input_data_dict["retrieve_hass_conf"]["time_zone"] ).replace(second=0, microsecond=0) - if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest": - idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[0] - elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first": - idx_closest = opt_res_latest.index.get_indexer( - [now_precise], method="ffill")[0] - elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last": - idx_closest = opt_res_latest.index.get_indexer( - [now_precise], method="bfill")[0] + if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest": + idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[ + 0 + ] + elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first": + idx_closest = opt_res_latest.index.get_indexer([now_precise], method="ffill")[0] + elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last": + idx_closest = 
opt_res_latest.index.get_indexer([now_precise], method="bfill")[0] if idx_closest == -1: - idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[0] + idx_closest = opt_res_latest.index.get_indexer([now_precise], method="nearest")[ + 0 + ] # Publish the data publish_prefix = params["passed_data"]["publish_prefix"] # Publish PV forecast @@ -861,7 +1098,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="power", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) # Publish Load forecast custom_load_forecast_id = params["passed_data"]["custom_load_forecast_id"] @@ -874,11 +1111,11 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="power", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = ["P_PV", "P_Load"] # Publish PV curtailment - if input_data_dict["fcst"].plant_conf['compute_curtailment']: + if input_data_dict["fcst"].plant_conf["compute_curtailment"]: custom_pv_curtailment_id = params["passed_data"]["custom_pv_curtailment_id"] input_data_dict["rh"].post_data( opt_res_latest["P_PV_curtailment"], @@ -889,11 +1126,11 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="power", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["P_PV_curtailment"] # Publish P_hybrid_inverter - if input_data_dict["fcst"].plant_conf['inverter_is_hybrid']: + if input_data_dict["fcst"].plant_conf["inverter_is_hybrid"]: custom_hybrid_inverter_id = params["passed_data"]["custom_hybrid_inverter_id"] input_data_dict["rh"].post_data( opt_res_latest["P_hybrid_inverter"], @@ -904,14 +1141,14 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="power", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["P_hybrid_inverter"] # Publish deferrable loads custom_deferrable_forecast_id = params["passed_data"][ "custom_deferrable_forecast_id" ] - for k in range(input_data_dict["opt"].optim_conf['number_of_deferrable_loads']): + for k in range(input_data_dict["opt"].optim_conf["number_of_deferrable_loads"]): if "P_deferrable{}".format(k) not in opt_res_latest.columns: logger.error( "P_deferrable{}".format(k) @@ -927,16 +1164,19 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="deferrable", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["P_deferrable{}".format(k)] # Publish thermal model data (predicted temperature) custom_predicted_temperature_id = params["passed_data"][ "custom_predicted_temperature_id" ] - for k in range(input_data_dict["opt"].optim_conf['number_of_deferrable_loads']): + for k in range(input_data_dict["opt"].optim_conf["number_of_deferrable_loads"]): if "def_load_config" in input_data_dict["opt"].optim_conf.keys(): - if "thermal_config" in input_data_dict["opt"].optim_conf["def_load_config"][k]: + if ( + "thermal_config" + in input_data_dict["opt"].optim_conf["def_load_config"][k] + ): input_data_dict["rh"].post_data( opt_res_latest["predicted_temp_heater{}".format(k)], idx_closest, @@ -946,11 +1186,11 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="temperature", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + 
dont_post=dont_post, ) cols_published = cols_published + ["predicted_temp_heater{}".format(k)] # Publish battery power - if input_data_dict["opt"].optim_conf['set_use_battery']: + if input_data_dict["opt"].optim_conf["set_use_battery"]: if "P_batt" not in opt_res_latest.columns: logger.error( "P_batt was not found in results DataFrame. Optimization task may need to be relaunched or it did not converge to a solution.", @@ -966,7 +1206,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="batt", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["P_batt"] custom_batt_soc_forecast_id = params["passed_data"][ @@ -981,7 +1221,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="SOC", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["SOC_opt"] # Publish grid power @@ -995,7 +1235,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="power", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["P_grid"] # Publish total value of cost function @@ -1010,7 +1250,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="cost_fun", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) # cols_published = cols_published + col_cost_fun # Publish the optimization status @@ -1030,7 +1270,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="optim_status", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["optim_status"] # Publish unit_load_cost @@ -1044,7 +1284,7 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="unit_load_cost", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["unit_load_cost"] # Publish unit_prod_price @@ -1058,15 +1298,17 @@ def publish_data(input_data_dict: dict, logger: logging.Logger, type_var="unit_prod_price", publish_prefix=publish_prefix, save_entities=entity_save, - dont_post=dont_post + dont_post=dont_post, ) cols_published = cols_published + ["unit_prod_price"] # Create a DF resuming what has been published - opt_res = opt_res_latest[cols_published].loc[[ - opt_res_latest.index[idx_closest]]] + opt_res = opt_res_latest[cols_published].loc[[opt_res_latest.index[idx_closest]]] return opt_res -def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger: logging.Logger): + +def continual_publish( + input_data_dict: dict, entity_path: pathlib.Path, logger: logging.Logger +): """ If continual_publish is true and a entity file saved in /data_path/entities, continually publish sensor on freq rate, updating entity current state value based on timestamp @@ -1079,31 +1321,56 @@ def continual_publish(input_data_dict: dict, entity_path: pathlib.Path, logger: """ logger.info("Continual publish thread service started") - freq = input_data_dict['retrieve_hass_conf'].get('optimization_time_step', pd.to_timedelta(1, "minutes")) + freq = input_data_dict["retrieve_hass_conf"].get( + "optimization_time_step", pd.to_timedelta(1, "minutes") + ) entity_path_contents = [] while True: # Sleep for x seconds (using current time as a reference for time 
left) - time.sleep(max(0,freq.total_seconds() - (datetime.now(input_data_dict["retrieve_hass_conf"]["time_zone"]).timestamp() % 60))) + time.sleep( + max( + 0, + freq.total_seconds() + - ( + datetime.now( + input_data_dict["retrieve_hass_conf"]["time_zone"] + ).timestamp() + % 60 + ), + ) + ) # Loop through all saved entity files if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0: - entity_path_contents = os.listdir(entity_path) + entity_path_contents = os.listdir(entity_path) for entity in entity_path_contents: if entity != "metadata.json": - # Call publish_json with entity file, build entity, and publish - publish_json(entity, input_data_dict, entity_path, logger, 'continual_publish') - # Retrieve entity metadata from file + # Call publish_json with entity file, build entity, and publish + publish_json( + entity, + input_data_dict, + entity_path, + logger, + "continual_publish", + ) + # Retrieve entity metadata from file if os.path.isfile(entity_path / "metadata.json"): with open(entity_path / "metadata.json", "r") as file: metadata = json.load(file) # Check if freq should be shorter - if not metadata.get("lowest_time_step",None) == None: + if not metadata.get("lowest_time_step", None) == None: freq = pd.to_timedelta(metadata["lowest_time_step"], "minutes") - pass - # This function should never return - return False - -def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path, - logger: logging.Logger, reference: Optional[str] = ""): + pass + # This function should never return + return False + + +def publish_json( + entity: dict, + input_data_dict: dict, + entity_path: pathlib.Path, + logger: logging.Logger, + reference: Optional[str] = "", +): """ Extract saved entity data from .json (in data_path/entities), build entity, post results to post_data @@ -1115,9 +1382,9 @@ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path, :type entity_path: Path :param logger: The passed logger object :type logger: logging.Logger - :param reference: String for identifying who ran the function + :param reference: String for identifying who ran the function :type reference: str, optional - + """ # Retrieve entity metadata from file if os.path.isfile(entity_path / "metadata.json"): @@ -1125,32 +1392,38 @@ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path, metadata = json.load(file) else: logger.error("unable to located metadata.json in:" + entity_path) - return False + return False # Round current timecode (now) - now_precise = datetime.now(input_data_dict["retrieve_hass_conf"]["time_zone"]).replace(second=0, microsecond=0) + now_precise = datetime.now( + input_data_dict["retrieve_hass_conf"]["time_zone"] + ).replace(second=0, microsecond=0) # Retrieve entity data from file - entity_data = pd.read_json(entity_path / entity , orient='index') + entity_data = pd.read_json(entity_path / entity, orient="index") # Remove ".json" from string for entity_id entity_id = entity.replace(".json", "") # Adjust Dataframe from received entity json file entity_data.columns = [metadata[entity_id]["name"]] entity_data.index.name = "timestamp" - entity_data.index = pd.to_datetime(entity_data.index).tz_convert(input_data_dict["retrieve_hass_conf"]["time_zone"]) - entity_data.index.freq = pd.to_timedelta(int(metadata[entity_id]['optimization_time_step']), "minutes") + entity_data.index = pd.to_datetime(entity_data.index).tz_convert( + input_data_dict["retrieve_hass_conf"]["time_zone"] + ) + entity_data.index.freq = pd.to_timedelta( 
+ int(metadata[entity_id]["optimization_time_step"]), "minutes" + ) # Calculate the current state value - if input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "nearest": + if input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "nearest": idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0] - elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "first": + elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "first": idx_closest = entity_data.index.get_indexer([now_precise], method="ffill")[0] - elif input_data_dict["retrieve_hass_conf"]['method_ts_round'] == "last": + elif input_data_dict["retrieve_hass_conf"]["method_ts_round"] == "last": idx_closest = entity_data.index.get_indexer([now_precise], method="bfill")[0] if idx_closest == -1: idx_closest = entity_data.index.get_indexer([now_precise], method="nearest")[0] - # Call post data - if reference == 'continual_publish': + # Call post data + if reference == "continual_publish": logger.debug("Auto Published sensor:") logger_levels = "DEBUG" - else: + else: logger_levels = "INFO" # post/save entity input_data_dict["rh"].post_data( @@ -1159,9 +1432,9 @@ def publish_json(entity: dict, input_data_dict: dict, entity_path: pathlib.Path, entity_id=entity_id, unit_of_measurement=metadata[entity_id]["unit_of_measurement"], friendly_name=metadata[entity_id]["friendly_name"], - type_var=metadata[entity_id].get("type_var",""), + type_var=metadata[entity_id].get("type_var", ""), save_entities=False, - logger_levels=logger_levels + logger_levels=logger_levels, ) return entity_data[metadata[entity_id]["name"]] @@ -1189,70 +1462,100 @@ def main(): """ # Parsing arguments parser = argparse.ArgumentParser() - parser.add_argument('--action', type=str, help='Set the desired action, options are: perfect-optim, dayahead-optim,\ - naive-mpc-optim, publish-data, forecast-model-fit, forecast-model-predict, forecast-model-tune') - parser.add_argument('--config', type=str, - help='Define path to the config.json/defaults.json file') - parser.add_argument('--params', type=str, default=None, - help='String of configuration parameters passed') - parser.add_argument('--data', type=str, - help='Define path to the Data files (.csv & .pkl)') - parser.add_argument('--root', type=str, help='Define path emhass root') - parser.add_argument('--costfun', type=str, default='profit', - help='Define the type of cost function, options are: profit, cost, self-consumption') - parser.add_argument('--log2file', type=strtobool, default='False', - help='Define if we should log to a file or not') - parser.add_argument('--secrets', type=str, default=None, - help='Define secret parameter file (secrets_emhass.yaml) path') - parser.add_argument('--runtimeparams', type=str, default=None, - help='Pass runtime optimization parameters as dictionnary') - parser.add_argument('--debug', type=strtobool, - default='False', help='Use True for testing purposes') + parser.add_argument( + "--action", + type=str, + help="Set the desired action, options are: perfect-optim, dayahead-optim,\ + naive-mpc-optim, publish-data, forecast-model-fit, forecast-model-predict, forecast-model-tune", + ) + parser.add_argument( + "--config", type=str, help="Define path to the config.json/defaults.json file" + ) + parser.add_argument( + "--params", + type=str, + default=None, + help="String of configuration parameters passed", + ) + parser.add_argument( + "--data", type=str, help="Define path to the Data files (.csv & .pkl)" + ) + 
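# --- Editor's illustrative aside (not part of this patch) --------------------
# A minimal sketch of how the flags defined by this parser are typically
# combined when the module is run from a shell. The entry-point invocation and
# the paths shown are assumptions for illustration only; the flag names and
# the --action values come from the parser itself:
#
#   python3 -m emhass.command_line --action dayahead-optim \
#       --config /app/config.json --costfun profit --log2file False \
#       --runtimeparams '{"pv_power_forecast": [0.0, 150.0, 420.0]}'
#
# Both --params and --runtimeparams expect a JSON object serialized to a
# string: --params is merged into the built params via json.loads() later in
# main(), while --runtimeparams is forwarded unchanged to set_input_data_dict()
# for the selected action to consume.
# ------------------------------------------------------------------------------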
parser.add_argument("--root", type=str, help="Define path emhass root") + parser.add_argument( + "--costfun", + type=str, + default="profit", + help="Define the type of cost function, options are: profit, cost, self-consumption", + ) + parser.add_argument( + "--log2file", + type=strtobool, + default="False", + help="Define if we should log to a file or not", + ) + parser.add_argument( + "--secrets", + type=str, + default=None, + help="Define secret parameter file (secrets_emhass.yaml) path", + ) + parser.add_argument( + "--runtimeparams", + type=str, + default=None, + help="Pass runtime optimization parameters as dictionnary", + ) + parser.add_argument( + "--debug", type=strtobool, default="False", help="Use True for testing purposes" + ) args = parser.parse_args() - + # The path to the configuration files if args.config is not None: config_path = pathlib.Path(args.config) else: - config_path = pathlib.Path(str(utils.get_root(__file__, num_parent=3) / 'config.json')) + config_path = pathlib.Path( + str(utils.get_root(__file__, num_parent=3) / "config.json") + ) if args.data is not None: data_path = pathlib.Path(args.data) else: - data_path = (config_path.parent / 'data/') + data_path = config_path.parent / "data/" if args.root is not None: root_path = pathlib.Path(args.root) else: - root_path = utils.get_root(__file__, num_parent=1) + root_path = utils.get_root(__file__, num_parent=1) if args.secrets is not None: secrets_path = pathlib.Path(args.secrets) else: - secrets_path = pathlib.Path(config_path.parent / 'secrets_emhass.yaml') + secrets_path = pathlib.Path(config_path.parent / "secrets_emhass.yaml") + + associations_path = root_path / "data/associations.csv" + defaults_path = root_path / "data/config_defaults.json" - associations_path = root_path / 'data/associations.csv' - defaults_path = root_path / 'data/config_defaults.json' - emhass_conf = {} - emhass_conf['config_path'] = config_path - emhass_conf['data_path'] = data_path - emhass_conf['root_path'] = root_path - emhass_conf['associations_path'] = associations_path - emhass_conf['defaults_path'] = defaults_path + emhass_conf["config_path"] = config_path + emhass_conf["data_path"] = data_path + emhass_conf["root_path"] = root_path + emhass_conf["associations_path"] = associations_path + emhass_conf["defaults_path"] = defaults_path # create logger logger, ch = utils.get_logger( - __name__, emhass_conf, save_to_file=bool(args.log2file)) - + __name__, emhass_conf, save_to_file=bool(args.log2file) + ) + # Check paths logger.debug("config path: " + str(config_path)) logger.debug("data path: " + str(data_path)) logger.debug("root path: " + str(root_path)) - if not associations_path.exists(): + if not associations_path.exists(): logger.error( - "Could not find associations.csv file in: " + str(associations_path)) + "Could not find associations.csv file in: " + str(associations_path) + ) logger.error("Try setting config file path with --associations") return False - if not config_path.exists(): - logger.warning( - "Could not find config.json file in: " + str(config_path)) + if not config_path.exists(): + logger.warning("Could not find config.json file in: " + str(config_path)) logger.warning("Try setting config file path with --config") if not secrets_path.exists(): logger.warning("Could not find secrets file in: " + str(secrets_path)) @@ -1265,7 +1568,7 @@ def main(): logger.error("Could not find emhass/src folder in: " + str(root_path)) logger.error("Try setting emhass root path with --root") return False - + # Additional argument try: 
parser.add_argument(
@@ -1285,47 +1588,60 @@ def main():
    if config_path.exists():
        config_file_ending = re.findall("(?<=\.).*$", str(config_path))
        if len(config_file_ending) > 0:
-            match(config_file_ending[0]):
+            match config_file_ending[0]:
                case "json":
-                    config = utils.build_config(emhass_conf,logger,defaults_path,config_path)
+                    config = utils.build_config(
+                        emhass_conf, logger, defaults_path, config_path
+                    )
                case "yaml":
-                    config = utils.build_config(emhass_conf,logger,defaults_path,config_path=config_path)
+                    config = utils.build_config(
+                        emhass_conf, logger, defaults_path, config_path=config_path
+                    )
                case "yml":
-                    config = utils.build_config(emhass_conf,logger,defaults_path,config_path=config_path)
+                    config = utils.build_config(
+                        emhass_conf, logger, defaults_path, config_path=config_path
+                    )
    # If unable to find config file, use only defaults_config.json
    else:
-        logger.warning("Unable to obtain config.json file, building parameters with only defaults")
-        config = utils.build_config(emhass_conf,logger,defaults_path)
+        logger.warning(
+            "Unable to obtain config.json file, building parameters with only defaults"
+        )
+        config = utils.build_config(emhass_conf, logger, defaults_path)
        if type(config) is bool and not config:
            raise Exception("Failed to find default config")
-
    # Obtain secrets from secrets_emhass.yaml?
    params_secrets = {}
-    emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger,secrets_path=secrets_path)
+    emhass_conf, built_secrets = utils.build_secrets(
+        emhass_conf, logger, secrets_path=secrets_path
+    )
    params_secrets.update(built_secrets)
    # Build params
    params = utils.build_params(emhass_conf, params_secrets, config, logger)
    if type(params) is bool:
-        raise Exception("A error has occurred while building parameters")
+        raise Exception("An error has occurred while building parameters")
    # Add any passed params from args to params
    if args.params:
        params.update(json.loads(args.params))
-
-    input_data_dict = set_input_data_dict(emhass_conf,
-        args.costfun, json.dumps(params), args.runtimeparams, args.action,
-        logger, args.debug)
+
+    input_data_dict = set_input_data_dict(
+        emhass_conf,
+        args.costfun,
+        json.dumps(params),
+        args.runtimeparams,
+        args.action,
+        logger,
+        args.debug,
+    )
    if type(input_data_dict) is bool:
-        raise Exception("A error has occurred while creating action objects")
+        raise Exception("An error has occurred while creating action objects")
    # Perform selected action
    if args.action == "perfect-optim":
-        opt_res = perfect_forecast_optim(
-            input_data_dict, logger, debug=args.debug)
+        opt_res = perfect_forecast_optim(input_data_dict, logger, debug=args.debug)
    elif args.action == "dayahead-optim":
-        opt_res = dayahead_forecast_optim(
-            input_data_dict, logger, debug=args.debug)
+        opt_res = dayahead_forecast_optim(input_data_dict, logger, debug=args.debug)
    elif args.action == "naive-mpc-optim":
        opt_res = naive_mpc_optim(input_data_dict, logger, debug=args.debug)
    elif args.action == "forecast-model-fit":
@@ -1338,14 +1654,18 @@ def main():
            _, _, mlf = forecast_model_fit(input_data_dict, logger, debug=args.debug)
        else:
            mlf = None
-        df_pred = forecast_model_predict(input_data_dict, logger, debug=args.debug, mlf=mlf)
+        df_pred = forecast_model_predict(
+            input_data_dict, logger, debug=args.debug, mlf=mlf
+        )
        opt_res = None
    elif args.action == "forecast-model-tune":
        if args.debug:
            _, _, mlf = forecast_model_fit(input_data_dict, logger, debug=args.debug)
        else:
            mlf = None
-        df_pred_optim, mlf = forecast_model_tune(input_data_dict, logger, debug=args.debug,
mlf=mlf) + df_pred_optim, mlf = forecast_model_tune( + input_data_dict, logger, debug=args.debug, mlf=mlf + ) opt_res = None elif args.action == "regressor-model-fit": mlr = regressor_model_fit(input_data_dict, logger, debug=args.debug) @@ -1355,13 +1675,17 @@ def main(): mlr = regressor_model_fit(input_data_dict, logger, debug=args.debug) else: mlr = None - prediction = regressor_model_predict(input_data_dict, logger, debug=args.debug,mlr=mlr) + prediction = regressor_model_predict( + input_data_dict, logger, debug=args.debug, mlr=mlr + ) opt_res = None elif args.action == "publish-data": - opt_res = publish_data(input_data_dict,logger) + opt_res = publish_data(input_data_dict, logger) else: logger.error("The passed action argument is not valid") - logger.error("Try setting --action: perfect-optim, dayahead-optim, naive-mpc-optim, forecast-model-fit, forecast-model-predict, forecast-model-tune or publish-data") + logger.error( + "Try setting --action: perfect-optim, dayahead-optim, naive-mpc-optim, forecast-model-fit, forecast-model-predict, forecast-model-tune or publish-data" + ) opt_res = None logger.info(opt_res) # Flush the logger diff --git a/src/emhass/forecast.py b/src/emhass/forecast.py index 366c6925..8afd14ee 100644 --- a/src/emhass/forecast.py +++ b/src/emhass/forecast.py @@ -1,29 +1,29 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import pathlib -import os -import pickle +import bz2 import copy -import logging import json -from typing import Optional -import bz2 +import logging +import os +import pickle import pickle as cPickle -import pandas as pd -import numpy as np from datetime import datetime, timedelta -from requests import get -from bs4 import BeautifulSoup +from typing import Optional + +import numpy as np +import pandas as pd import pvlib -from pvlib.pvsystem import PVSystem +from bs4 import BeautifulSoup +from pvlib.irradiance import disc from pvlib.location import Location from pvlib.modelchain import ModelChain +from pvlib.pvsystem import PVSystem from pvlib.temperature import TEMPERATURE_MODEL_PARAMETERS -from pvlib.irradiance import disc +from requests import get -from emhass.retrieve_hass import RetrieveHass from emhass.machine_learning_forecaster import MLForecaster +from emhass.retrieve_hass import RetrieveHass from emhass.utils import get_days_list, set_df_index_freq @@ -98,13 +98,20 @@ class Forecast(object): """ - def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict, - params: str, emhass_conf: dict, logger: logging.Logger, - opt_time_delta: Optional[int] = 24, - get_data_from_file: Optional[bool] = False) -> None: + def __init__( + self, + retrieve_hass_conf: dict, + optim_conf: dict, + plant_conf: dict, + params: str, + emhass_conf: dict, + logger: logging.Logger, + opt_time_delta: Optional[int] = 24, + get_data_from_file: Optional[bool] = False, + ) -> None: """ Define constructor for the forecast class. 
- + :param retrieve_hass_conf: Dictionary containing the needed configuration data from the configuration file, specific to retrieve data from HASS :type retrieve_hass_conf: dict @@ -120,10 +127,10 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict, :type emhass_conf: dict :param logger: The passed logger object :type logger: logging object - :param opt_time_delta: The time delta in hours used to generate forecasts, + :param opt_time_delta: The time delta in hours used to generate forecasts, a value of 24 will generate 24 hours of forecast data, defaults to 24 :type opt_time_delta: int, optional - :param get_data_from_file: Select if data should be retrieved from a + :param get_data_from_file: Select if data should be retrieved from a previously saved pickle useful for testing or directly from connection to hass database :type get_data_from_file: bool, optional @@ -132,47 +139,71 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict, self.retrieve_hass_conf = retrieve_hass_conf self.optim_conf = optim_conf self.plant_conf = plant_conf - self.freq = self.retrieve_hass_conf['optimization_time_step'] - self.time_zone = self.retrieve_hass_conf['time_zone'] - self.method_ts_round = self.retrieve_hass_conf['method_ts_round'] - self.timeStep = self.freq.seconds/3600 # in hours + self.freq = self.retrieve_hass_conf["optimization_time_step"] + self.time_zone = self.retrieve_hass_conf["time_zone"] + self.method_ts_round = self.retrieve_hass_conf["method_ts_round"] + self.timeStep = self.freq.seconds / 3600 # in hours self.time_delta = pd.to_timedelta(opt_time_delta, "hours") - self.var_PV = self.retrieve_hass_conf['sensor_power_photovoltaics'] - self.var_load = self.retrieve_hass_conf['sensor_power_load_no_var_loads'] - self.var_load_new = self.var_load+'_positive' - self.lat = self.retrieve_hass_conf['Latitude'] - self.lon = self.retrieve_hass_conf['Longitude'] + self.var_PV = self.retrieve_hass_conf["sensor_power_photovoltaics"] + self.var_load = self.retrieve_hass_conf["sensor_power_load_no_var_loads"] + self.var_load_new = self.var_load + "_positive" + self.lat = self.retrieve_hass_conf["Latitude"] + self.lon = self.retrieve_hass_conf["Longitude"] self.emhass_conf = emhass_conf self.logger = logger self.get_data_from_file = get_data_from_file - self.var_load_cost = 'unit_load_cost' - self.var_prod_price = 'unit_prod_price' + self.var_load_cost = "unit_load_cost" + self.var_prod_price = "unit_prod_price" if (params == None) or (params == "null"): self.params = {} elif type(params) is dict: self.params = params else: self.params = json.loads(params) - if self.method_ts_round == 'nearest': - self.start_forecast = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0) - elif self.method_ts_round == 'first': - self.start_forecast = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0).floor(freq=self.freq) - elif self.method_ts_round == 'last': - self.start_forecast = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0).ceil(freq=self.freq) + if self.method_ts_round == "nearest": + self.start_forecast = pd.Timestamp( + datetime.now(), tz=self.time_zone + ).replace(microsecond=0) + elif self.method_ts_round == "first": + self.start_forecast = ( + pd.Timestamp(datetime.now(), tz=self.time_zone) + .replace(microsecond=0) + .floor(freq=self.freq) + ) + elif self.method_ts_round == "last": + self.start_forecast = ( + pd.Timestamp(datetime.now(), tz=self.time_zone) + .replace(microsecond=0) + 
.ceil(freq=self.freq) + ) else: self.logger.error("Wrong method_ts_round passed parameter") - self.end_forecast = (self.start_forecast + self.optim_conf['delta_forecast_daily']).replace(microsecond=0) - self.forecast_dates = pd.date_range(start=self.start_forecast, - end=self.end_forecast-self.freq, - freq=self.freq, tz=self.time_zone).tz_convert('utc').round(self.freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(self.time_zone) + self.end_forecast = ( + self.start_forecast + self.optim_conf["delta_forecast_daily"] + ).replace(microsecond=0) + self.forecast_dates = ( + pd.date_range( + start=self.start_forecast, + end=self.end_forecast - self.freq, + freq=self.freq, + tz=self.time_zone, + ) + .tz_convert("utc") + .round(self.freq, ambiguous="infer", nonexistent="shift_forward") + .tz_convert(self.time_zone) + ) if params is not None: - if 'prediction_horizon' in list(self.params['passed_data'].keys()): - if self.params['passed_data']['prediction_horizon'] is not None: - self.forecast_dates = self.forecast_dates[0:self.params['passed_data']['prediction_horizon']] - - - def get_weather_forecast(self, method: Optional[str] = 'scrapper', - csv_path: Optional[str] = "data_weather_forecast.csv") -> pd.DataFrame: + if "prediction_horizon" in list(self.params["passed_data"].keys()): + if self.params["passed_data"]["prediction_horizon"] is not None: + self.forecast_dates = self.forecast_dates[ + 0 : self.params["passed_data"]["prediction_horizon"] + ] + + def get_weather_forecast( + self, + method: Optional[str] = "scrapper", + csv_path: Optional[str] = "data_weather_forecast.csv", + ) -> pd.DataFrame: r""" Get and generate weather forecast data. @@ -183,230 +214,360 @@ def get_weather_forecast(self, method: Optional[str] = 'scrapper', :rtype: pd.DataFrame """ - csv_path = self.emhass_conf['data_path'] / csv_path - w_forecast_cache_path = os.path.abspath(self.emhass_conf['data_path'] / "weather_forecast_data.pkl") - - self.logger.info("Retrieving weather forecast data using method = "+method) - self.weather_forecast_method = method # Saving this attribute for later use to identify csv method usage - if method == 'scrapper': - freq_scrap = pd.to_timedelta(60, "minutes") # The scrapping time step is 60min on clearoutside - forecast_dates_scrap = pd.date_range(start=self.start_forecast, - end=self.end_forecast-freq_scrap, - freq=freq_scrap, tz=self.time_zone).tz_convert('utc').round(freq_scrap, ambiguous='infer', nonexistent='shift_forward').tz_convert(self.time_zone) + csv_path = self.emhass_conf["data_path"] / csv_path + w_forecast_cache_path = os.path.abspath( + self.emhass_conf["data_path"] / "weather_forecast_data.pkl" + ) + + self.logger.info("Retrieving weather forecast data using method = " + method) + self.weather_forecast_method = ( + method # Saving this attribute for later use to identify csv method usage + ) + if method == "scrapper": + freq_scrap = pd.to_timedelta( + 60, "minutes" + ) # The scrapping time step is 60min on clearoutside + forecast_dates_scrap = ( + pd.date_range( + start=self.start_forecast, + end=self.end_forecast - freq_scrap, + freq=freq_scrap, + tz=self.time_zone, + ) + .tz_convert("utc") + .round(freq_scrap, ambiguous="infer", nonexistent="shift_forward") + .tz_convert(self.time_zone) + ) # Using the clearoutside webpage - response = get("https://clearoutside.com/forecast/"+str(round(self.lat, 2))+"/"+str(round(self.lon, 2))+"?desktop=true") - '''import bz2 # Uncomment to save a serialized data for tests + response = get( + 
"https://clearoutside.com/forecast/" + + str(round(self.lat, 2)) + + "/" + + str(round(self.lon, 2)) + + "?desktop=true" + ) + """import bz2 # Uncomment to save a serialized data for tests import _pickle as cPickle with bz2.BZ2File("data/test_response_scrapper_get_method.pbz2", "w") as f: - cPickle.dump(response.content, f)''' - soup = BeautifulSoup(response.content, 'html.parser') - table = soup.find_all(id='day_0')[0] - list_names = table.find_all(class_='fc_detail_label') - list_tables = table.find_all('ul')[1:] - selected_cols = [0, 1, 2, 3, 10, 12, 15] # Selected variables + cPickle.dump(response.content, f)""" + soup = BeautifulSoup(response.content, "html.parser") + table = soup.find_all(id="day_0")[0] + list_names = table.find_all(class_="fc_detail_label") + list_tables = table.find_all("ul")[1:] + selected_cols = [0, 1, 2, 3, 10, 12, 15] # Selected variables col_names = [list_names[i].get_text() for i in selected_cols] list_tables = [list_tables[i] for i in selected_cols] # Building the raw DF container - raw_data = pd.DataFrame(index=range(len(forecast_dates_scrap)), columns=col_names, dtype=float) + raw_data = pd.DataFrame( + index=range(len(forecast_dates_scrap)), columns=col_names, dtype=float + ) for count_col, col in enumerate(col_names): - list_rows = list_tables[count_col].find_all('li') + list_rows = list_tables[count_col].find_all("li") for count_row, row in enumerate(list_rows): raw_data.loc[count_row, col] = float(row.get_text()) # Treating index raw_data.set_index(forecast_dates_scrap, inplace=True) - raw_data = raw_data[~raw_data.index.duplicated(keep='first')] + raw_data = raw_data[~raw_data.index.duplicated(keep="first")] raw_data = raw_data.reindex(self.forecast_dates) - raw_data.interpolate(method='linear', axis=0, limit=None, - limit_direction='both', inplace=True) + raw_data.interpolate( + method="linear", + axis=0, + limit=None, + limit_direction="both", + inplace=True, + ) # Converting the cloud cover into Global Horizontal Irradiance with a PVLib method - ghi_est = self.cloud_cover_to_irradiance(raw_data['Total Clouds (% Sky Obscured)']) + ghi_est = self.cloud_cover_to_irradiance( + raw_data["Total Clouds (% Sky Obscured)"] + ) data = ghi_est - data['temp_air'] = raw_data['Temperature (°C)'] - data['wind_speed'] = raw_data['Wind Speed/Direction (mph)']*1.60934 # conversion to km/h - data['relative_humidity'] = raw_data['Relative Humidity (%)'] - data['precipitable_water'] = pvlib.atmosphere.gueymard94_pw( - data['temp_air'], data['relative_humidity']) - elif method == 'solcast': # using Solcast API + data["temp_air"] = raw_data["Temperature (°C)"] + data["wind_speed"] = ( + raw_data["Wind Speed/Direction (mph)"] * 1.60934 + ) # conversion to km/h + data["relative_humidity"] = raw_data["Relative Humidity (%)"] + data["precipitable_water"] = pvlib.atmosphere.gueymard94_pw( + data["temp_air"], data["relative_humidity"] + ) + elif method == "solcast": # using Solcast API # Check if weather_forecast_cache is true or if forecast_data file does not exist if not os.path.isfile(w_forecast_cache_path): # Check if weather_forecast_cache_only is true, if so produce error for not finding cache file - if not self.params["passed_data"].get("weather_forecast_cache_only",False): + if not self.params["passed_data"].get( + "weather_forecast_cache_only", False + ): # Retrieve data from the Solcast API - if 'solcast_api_key' not in self.retrieve_hass_conf: - self.logger.error("The solcast_api_key parameter was not defined") + if "solcast_api_key" not in self.retrieve_hass_conf: 
+                    self.logger.error(
+                        "The solcast_api_key parameter was not defined"
+                    )
                    return False
-                if 'solcast_rooftop_id' not in self.retrieve_hass_conf:
-                    self.logger.error("The solcast_rooftop_id parameter was not defined")
+                if "solcast_rooftop_id" not in self.retrieve_hass_conf:
+                    self.logger.error(
+                        "The solcast_rooftop_id parameter was not defined"
+                    )
                    return False
                headers = {
-                    'User-Agent': 'EMHASS',
-                    "Authorization": "Bearer " + self.retrieve_hass_conf['solcast_api_key'],
+                    "User-Agent": "EMHASS",
+                    "Authorization": "Bearer "
+                    + self.retrieve_hass_conf["solcast_api_key"],
                    "content-type": "application/json",
-                    }
-                days_solcast = int(len(self.forecast_dates)*self.freq.seconds/3600)
-                # If weather_forecast_cache, set request days as twice as long to avoid length issues (add a buffer)
-                if self.params["passed_data"].get("weather_forecast_cache",False):
+                }
+                days_solcast = int(
+                    len(self.forecast_dates) * self.freq.seconds / 3600
+                )
+                # If weather_forecast_cache, set request days as twice as long to avoid length issues (add a buffer)
+                if self.params["passed_data"].get("weather_forecast_cache", False):
                    days_solcast = min((days_solcast * 2), 336)
-                url = "https://api.solcast.com.au/rooftop_sites/"+self.retrieve_hass_conf['solcast_rooftop_id']+"/forecasts?hours="+str(days_solcast)
+                url = (
+                    "https://api.solcast.com.au/rooftop_sites/"
+                    + self.retrieve_hass_conf["solcast_rooftop_id"]
+                    + "/forecasts?hours="
+                    + str(days_solcast)
+                )
                response = get(url, headers=headers)
-                '''import bz2 # Uncomment to save a serialized data for tests
+                """import bz2 # Uncomment to save a serialized data for tests
                import _pickle as cPickle
                with bz2.BZ2File("data/test_response_solcast_get_method.pbz2", "w") as f:
-                    cPickle.dump(response, f)'''
+                    cPickle.dump(response, f)"""
                # Verify the request passed
                if int(response.status_code) == 200:
                    data = response.json()
-                elif int(response.status_code) == 402 or int(response.status_code) == 429:
-                    self.logger.error("Solcast error: May have exceeded your subscription limit.")
-                    return False
-                elif int(response.status_code) >= 400 or int(response.status_code) >= 202:
-                    self.logger.error("Solcast error: There was a issue with the solcast request, check solcast API key and rooftop ID.")
-                    self.logger.error("Solcast error: Check that your subscription is valid and your network can connect to Solcast.")
-                    return False
+                elif (
+                    int(response.status_code) == 402
+                    or int(response.status_code) == 429
+                ):
+                    self.logger.error(
+                        "Solcast error: May have exceeded your subscription limit."
+                    )
+                    return False
+                elif (
+                    int(response.status_code) >= 400
+                    or int(response.status_code) >= 202
+                ):
+                    self.logger.error(
+                        "Solcast error: There was an issue with the Solcast request, check the Solcast API key and rooftop ID."
+                    )
+                    self.logger.error(
+                        "Solcast error: Check that your subscription is valid and your network can connect to Solcast."
+                    )
+                    return False
                data_list = []
-                for elm in data['forecasts']:
-                    data_list.append(elm['pv_estimate']*1000) # Converting kW to W
+                for elm in data["forecasts"]:
+                    data_list.append(
+                        elm["pv_estimate"] * 1000
+                    )  # Converting kW to W
                # Check if the retrieved data has the correct length
                if len(data_list) < len(self.forecast_dates):
-                    self.logger.error("Not enough data retried from Solcast service, try increasing the time step or use MPC.")
+                    self.logger.error(
+                        "Not enough data retrieved from the Solcast service, try increasing the time step or use MPC."
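# --- Editor's worked example (illustrative, not part of this patch) ----------
# How the "hours=" value requested from Solcast above is derived, assuming a
# 30-minute optimization_time_step and a 24 h forecast window (48 slots):
#
#   days_solcast = int(48 * 1800 / 3600)   # = 24 requested hours
#   days_solcast = min(24 * 2, 336)        # = 48 when weather_forecast_cache
#                                          #   is true, capped at 336 hours
#
# The length check that triggers the error above fires whenever Solcast
# returns fewer pv_estimate points than len(self.forecast_dates).
# ------------------------------------------------------------------------------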
+ ) else: # If runtime weather_forecast_cache is true save forecast result to file as cache - if self.params["passed_data"].get("weather_forecast_cache",False): + if self.params["passed_data"].get( + "weather_forecast_cache", False + ): # Add x2 forecast periods for cached results. This adds a extra delta_forecast amount of days for a buffer - cached_forecast_dates = self.forecast_dates.union(pd.date_range(self.forecast_dates[-1], periods=(len(self.forecast_dates) +1), freq=self.freq)[1:]) - cache_data_list = data_list[0:len(cached_forecast_dates)] - cache_data_dict = {'ts':cached_forecast_dates, 'yhat':cache_data_list} + cached_forecast_dates = self.forecast_dates.union( + pd.date_range( + self.forecast_dates[-1], + periods=(len(self.forecast_dates) + 1), + freq=self.freq, + )[1:] + ) + cache_data_list = data_list[0 : len(cached_forecast_dates)] + cache_data_dict = { + "ts": cached_forecast_dates, + "yhat": cache_data_list, + } data_cache = pd.DataFrame.from_dict(cache_data_dict) - data_cache.set_index('ts', inplace=True) - with open(w_forecast_cache_path, "wb") as file: + data_cache.set_index("ts", inplace=True) + with open(w_forecast_cache_path, "wb") as file: cPickle.dump(data_cache, file) if not os.path.isfile(w_forecast_cache_path): - self.logger.warning("Solcast forecast data could not be saved to file.") + self.logger.warning( + "Solcast forecast data could not be saved to file." + ) else: - self.logger.info("Saved the Solcast results to cache, for later reference.") - # Trim request results to forecast_dates - data_list = data_list[0:len(self.forecast_dates)] - data_dict = {'ts':self.forecast_dates, 'yhat':data_list} + self.logger.info( + "Saved the Solcast results to cache, for later reference." + ) + # Trim request results to forecast_dates + data_list = data_list[0 : len(self.forecast_dates)] + data_dict = {"ts": self.forecast_dates, "yhat": data_list} # Define DataFrame data = pd.DataFrame.from_dict(data_dict) # Define index - data.set_index('ts', inplace=True) + data.set_index("ts", inplace=True) # Else, notify user to update cache else: self.logger.error("Unable to obtain Solcast cache file.") - self.logger.error("Try running optimization again with 'weather_forecast_cache_only': false") - self.logger.error("Optionally, obtain new Solcast cache with runtime parameter 'weather_forecast_cache': true in an optimization, or run the `weather-forecast-cache` action, to pull new data from Solcast and cache.") + self.logger.error( + "Try running optimization again with 'weather_forecast_cache_only': false" + ) + self.logger.error( + "Optionally, obtain new Solcast cache with runtime parameter 'weather_forecast_cache': true in an optimization, or run the `weather-forecast-cache` action, to pull new data from Solcast and cache." + ) return False # Else, open stored weather_forecast_data.pkl file for previous forecast data (cached data) else: with open(w_forecast_cache_path, "rb") as file: data = cPickle.load(file) - if not isinstance(data, pd.DataFrame) or len(data) < len(self.forecast_dates): - self.logger.error("There has been a error obtaining cached Solcast forecast data.") - self.logger.error("Try running optimization again with 'weather_forecast_cache': true, or run action `weather-forecast-cache`, to pull new data from Solcast and cache.") - self.logger.warning("Removing old Solcast cache file. 
Next optimization will pull data from Solcast, unless 'weather_forecast_cache_only': true") + if not isinstance(data, pd.DataFrame) or len(data) < len( + self.forecast_dates + ): + self.logger.error( + "There has been a error obtaining cached Solcast forecast data." + ) + self.logger.error( + "Try running optimization again with 'weather_forecast_cache': true, or run action `weather-forecast-cache`, to pull new data from Solcast and cache." + ) + self.logger.warning( + "Removing old Solcast cache file. Next optimization will pull data from Solcast, unless 'weather_forecast_cache_only': true" + ) os.remove(w_forecast_cache_path) return False # Filter cached forecast data to match current forecast_dates start-end range (reduce forecast Dataframe size to appropriate length) - if self.forecast_dates[0] in data.index and self.forecast_dates[-1] in data.index: - data = data.loc[self.forecast_dates[0]:self.forecast_dates[-1]] - self.logger.info("Retrieved Solcast data from the previously saved cache.") + if ( + self.forecast_dates[0] in data.index + and self.forecast_dates[-1] in data.index + ): + data = data.loc[ + self.forecast_dates[0] : self.forecast_dates[-1] + ] + self.logger.info( + "Retrieved Solcast data from the previously saved cache." + ) else: - self.logger.error("Unable to obtain cached Solcast forecast data within the requested timeframe range.") - self.logger.error("Try running optimization again (not using cache). Optionally, add runtime parameter 'weather_forecast_cache': true to pull new data from Solcast and cache.") - self.logger.warning("Removing old Solcast cache file. Next optimization will pull data from Solcast, unless 'weather_forecast_cache_only': true") + self.logger.error( + "Unable to obtain cached Solcast forecast data within the requested timeframe range." + ) + self.logger.error( + "Try running optimization again (not using cache). Optionally, add runtime parameter 'weather_forecast_cache': true to pull new data from Solcast and cache." + ) + self.logger.warning( + "Removing old Solcast cache file. 
Next optimization will pull data from Solcast, unless 'weather_forecast_cache_only': true" + ) os.remove(w_forecast_cache_path) - return False - elif method == 'solar.forecast': # using the solar.forecast API + return False + elif method == "solar.forecast": # using the solar.forecast API # Retrieve data from the solar.forecast API - if 'solar_forecast_kwp' not in self.retrieve_hass_conf: - self.logger.warning("The solar_forecast_kwp parameter was not defined, using dummy values for testing") - self.retrieve_hass_conf['solar_forecast_kwp'] = 5 - if self.retrieve_hass_conf['solar_forecast_kwp'] == 0: - self.logger.warning("The solar_forecast_kwp parameter is set to zero, setting to default 5") - self.retrieve_hass_conf['solar_forecast_kwp'] = 5 - if self.optim_conf['delta_forecast_daily'].days > 1: - self.logger.warning("The free public tier for solar.forecast only provides one day forecasts") - self.logger.warning("Continuing with just the first day of data, the other days are filled with 0.0.") - self.logger.warning("Use the other available methods for delta_forecast_daily > 1") - headers = { - "Accept": "application/json" - } + if "solar_forecast_kwp" not in self.retrieve_hass_conf: + self.logger.warning( + "The solar_forecast_kwp parameter was not defined, using dummy values for testing" + ) + self.retrieve_hass_conf["solar_forecast_kwp"] = 5 + if self.retrieve_hass_conf["solar_forecast_kwp"] == 0: + self.logger.warning( + "The solar_forecast_kwp parameter is set to zero, setting to default 5" + ) + self.retrieve_hass_conf["solar_forecast_kwp"] = 5 + if self.optim_conf["delta_forecast_daily"].days > 1: + self.logger.warning( + "The free public tier for solar.forecast only provides one day forecasts" + ) + self.logger.warning( + "Continuing with just the first day of data, the other days are filled with 0.0." 
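# --- Editor's illustrative aside (not part of this patch) --------------------
# Sketch of the forecast.solar estimate URL assembled a few lines below, using
# hypothetical values (lat 45.83, lon 6.86, tilt 30, surface_azimuth 205, 5 kWp):
#
#   https://api.forecast.solar/estimate/45.83/6.86/30/25/5
#
# Latitude and longitude are rounded to two decimals, and the "- 180" offset
# converts surface_azimuth from the convention used in plant_conf (180 = due
# south) to the forecast.solar convention (0 = due south), so 205 becomes 25.
# ------------------------------------------------------------------------------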
+ ) + self.logger.warning( + "Use the other available methods for delta_forecast_daily > 1" + ) + headers = {"Accept": "application/json"} data = pd.DataFrame() - for i in range(len(self.plant_conf['pv_module_model'])): - url = "https://api.forecast.solar/estimate/"+str(round(self.lat, 2))+"/"+str(round(self.lon, 2))+\ - "/"+str(self.plant_conf['surface_tilt'][i])+"/"+str(self.plant_conf['surface_azimuth'][i]-180)+\ - "/"+str(self.retrieve_hass_conf["solar_forecast_kwp"]) + for i in range(len(self.plant_conf["pv_module_model"])): + url = ( + "https://api.forecast.solar/estimate/" + + str(round(self.lat, 2)) + + "/" + + str(round(self.lon, 2)) + + "/" + + str(self.plant_conf["surface_tilt"][i]) + + "/" + + str(self.plant_conf["surface_azimuth"][i] - 180) + + "/" + + str(self.retrieve_hass_conf["solar_forecast_kwp"]) + ) response = get(url, headers=headers) - '''import bz2 # Uncomment to save a serialized data for tests + """import bz2 # Uncomment to save a serialized data for tests import _pickle as cPickle with bz2.BZ2File("data/test_response_solarforecast_get_method.pbz2", "w") as f: - cPickle.dump(response.json(), f)''' + cPickle.dump(response.json(), f)""" data_raw = response.json() - data_dict = {'ts':list(data_raw['result']['watts'].keys()), 'yhat':list(data_raw['result']['watts'].values())} + data_dict = { + "ts": list(data_raw["result"]["watts"].keys()), + "yhat": list(data_raw["result"]["watts"].values()), + } # Form the final DataFrame data_tmp = pd.DataFrame.from_dict(data_dict) - data_tmp.set_index('ts', inplace=True) + data_tmp.set_index("ts", inplace=True) data_tmp.index = pd.to_datetime(data_tmp.index) data_tmp = data_tmp.tz_localize(self.forecast_dates.tz) data_tmp = data_tmp.reindex(index=self.forecast_dates) - mask_up_data_df = data_tmp.copy(deep=True).fillna(method = "ffill").isnull() - mask_down_data_df = data_tmp.copy(deep=True).fillna(method = "bfill").isnull() - data_tmp.loc[data_tmp.index[mask_up_data_df['yhat']==True],:] = 0.0 - data_tmp.loc[data_tmp.index[mask_down_data_df['yhat']==True],:] = 0.0 + mask_up_data_df = ( + data_tmp.copy(deep=True).fillna(method="ffill").isnull() + ) + mask_down_data_df = ( + data_tmp.copy(deep=True).fillna(method="bfill").isnull() + ) + data_tmp.loc[data_tmp.index[mask_up_data_df["yhat"] == True], :] = 0.0 + data_tmp.loc[data_tmp.index[mask_down_data_df["yhat"] == True], :] = 0.0 data_tmp.interpolate(inplace=True, limit=1) data_tmp = data_tmp.fillna(0.0) if len(data) == 0: data = copy.deepcopy(data_tmp) else: data = data + data_tmp - elif method == 'csv': # reading from a csv file + elif method == "csv": # reading from a csv file weather_csv_file_path = csv_path # Loading the csv file, we will consider that this is the PV power in W - data = pd.read_csv(weather_csv_file_path, header=None, names=['ts', 'yhat']) - # Check if the passed data has the correct length + data = pd.read_csv(weather_csv_file_path, header=None, names=["ts", "yhat"]) + # Check if the passed data has the correct length if len(data) < len(self.forecast_dates): self.logger.error("Passed data from CSV is not long enough") else: # Ensure correct length - data = data.loc[data.index[0:len(self.forecast_dates)],:] + data = data.loc[data.index[0 : len(self.forecast_dates)], :] # Define index data.index = self.forecast_dates - data.drop('ts', axis=1, inplace=True) + data.drop("ts", axis=1, inplace=True) data = data.copy().loc[self.forecast_dates] - elif method == 'list': # reading a list of values + elif method == "list": # reading a list of values # Loading data from 
passed list - data_list = self.params['passed_data']['pv_power_forecast'] + data_list = self.params["passed_data"]["pv_power_forecast"] # Check if the passed data has the correct length - if len(data_list) < len(self.forecast_dates) and self.params['passed_data']['prediction_horizon'] is None: + if ( + len(data_list) < len(self.forecast_dates) + and self.params["passed_data"]["prediction_horizon"] is None + ): self.logger.error("Passed data from passed list is not long enough") else: # Ensure correct length - data_list = data_list[0:len(self.forecast_dates)] + data_list = data_list[0 : len(self.forecast_dates)] # Define DataFrame - data_dict = {'ts':self.forecast_dates, 'yhat':data_list} + data_dict = {"ts": self.forecast_dates, "yhat": data_list} data = pd.DataFrame.from_dict(data_dict) # Define index - data.set_index('ts', inplace=True) + data.set_index("ts", inplace=True) else: self.logger.error("Method %r is not valid", method) data = None return data - - def cloud_cover_to_irradiance(self, cloud_cover: pd.Series, - offset:Optional[int] = 35) -> pd.DataFrame: + + def cloud_cover_to_irradiance( + self, cloud_cover: pd.Series, offset: Optional[int] = 35 + ) -> pd.DataFrame: """ Estimates irradiance from cloud cover in the following steps. - + 1. Determine clear sky GHI using Ineichen model and climatological turbidity. - + 2. Estimate cloudy sky GHI using a function of cloud_cover - + 3. Estimate cloudy sky DNI using the DISC model. - + 4. Calculate DHI from DNI and GHI. - + (This function was copied and modified from PVLib) :param cloud_cover: Cloud cover in %. @@ -418,21 +579,27 @@ def cloud_cover_to_irradiance(self, cloud_cover: pd.Series, """ location = Location(latitude=self.lat, longitude=self.lon) solpos = location.get_solarposition(cloud_cover.index) - cs = location.get_clearsky(cloud_cover.index, model='ineichen', - solar_position=solpos) + cs = location.get_clearsky( + cloud_cover.index, model="ineichen", solar_position=solpos + ) # Using only the linear method - offset = offset / 100. - cloud_cover_unit = copy.deepcopy(cloud_cover) / 100. - ghi = (offset + (1 - offset) * (1 - cloud_cover_unit)) * cs['ghi'] + offset = offset / 100.0 + cloud_cover_unit = copy.deepcopy(cloud_cover) / 100.0 + ghi = (offset + (1 - offset) * (1 - cloud_cover_unit)) * cs["ghi"] # Using disc model - dni = disc(ghi, solpos['zenith'], cloud_cover.index)['dni'] - dhi = ghi - dni * np.cos(np.radians(solpos['zenith'])) - irrads = pd.DataFrame({'ghi': ghi, 'dni': dni, 'dhi': dhi}).fillna(0) + dni = disc(ghi, solpos["zenith"], cloud_cover.index)["dni"] + dhi = ghi - dni * np.cos(np.radians(solpos["zenith"])) + irrads = pd.DataFrame({"ghi": ghi, "dni": dni, "dhi": dhi}).fillna(0) return irrads - + @staticmethod - def get_mix_forecast(df_now: pd.DataFrame, df_forecast: pd.DataFrame, - alpha:float, beta:float, col:str) -> pd.DataFrame: + def get_mix_forecast( + df_now: pd.DataFrame, + df_forecast: pd.DataFrame, + alpha: float, + beta: float, + col: str, + ) -> pd.DataFrame: """A simple correction method for forecasted data using the current real values of a variable. 
:param df_now: The DataFrame containing the current/real values @@ -448,13 +615,16 @@ def get_mix_forecast(df_now: pd.DataFrame, df_forecast: pd.DataFrame, :return: The output DataFrame with the corrected values :rtype: pd.DataFrame """ - first_fcst = alpha*df_forecast.iloc[0] + beta*df_now[col].iloc[-1] + first_fcst = alpha * df_forecast.iloc[0] + beta * df_now[col].iloc[-1] df_forecast.iloc[0] = first_fcst return df_forecast - - def get_power_from_weather(self, df_weather: pd.DataFrame, - set_mix_forecast:Optional[bool] = False, - df_now:Optional[pd.DataFrame] = pd.DataFrame()) -> pd.Series: + + def get_power_from_weather( + self, + df_weather: pd.DataFrame, + set_mix_forecast: Optional[bool] = False, + df_now: Optional[pd.DataFrame] = pd.DataFrame(), + ) -> pd.Series: r""" Convert wheater forecast data into electrical power. @@ -471,36 +641,55 @@ def get_power_from_weather(self, df_weather: pd.DataFrame, """ # If using csv method we consider that yhat is the PV power in W - if "solar_forecast_kwp" in self.retrieve_hass_conf.keys() and self.retrieve_hass_conf["solar_forecast_kwp"] == 0: + if ( + "solar_forecast_kwp" in self.retrieve_hass_conf.keys() + and self.retrieve_hass_conf["solar_forecast_kwp"] == 0 + ): P_PV_forecast = pd.Series(0, index=df_weather.index) else: - if self.weather_forecast_method == 'solcast' or self.weather_forecast_method == 'solar.forecast' or \ - self.weather_forecast_method == 'csv' or self.weather_forecast_method == 'list': - P_PV_forecast = df_weather['yhat'] + if ( + self.weather_forecast_method == "solcast" + or self.weather_forecast_method == "solar.forecast" + or self.weather_forecast_method == "csv" + or self.weather_forecast_method == "list" + ): + P_PV_forecast = df_weather["yhat"] P_PV_forecast.name = None - else: # We will transform the weather data into electrical power + else: # We will transform the weather data into electrical power # Transform to power (Watts) # Setting the main parameters of the PV plant location = Location(latitude=self.lat, longitude=self.lon) - temp_params = TEMPERATURE_MODEL_PARAMETERS['sapm']['close_mount_glass_glass'] - cec_modules = bz2.BZ2File(self.emhass_conf['root_path'] / 'data' / 'cec_modules.pbz2', "rb") + temp_params = TEMPERATURE_MODEL_PARAMETERS["sapm"][ + "close_mount_glass_glass" + ] + cec_modules = bz2.BZ2File( + self.emhass_conf["root_path"] / "data" / "cec_modules.pbz2", "rb" + ) cec_modules = cPickle.load(cec_modules) - cec_inverters = bz2.BZ2File(self.emhass_conf['root_path'] / 'data' / 'cec_inverters.pbz2', "rb") + cec_inverters = bz2.BZ2File( + self.emhass_conf["root_path"] / "data" / "cec_inverters.pbz2", "rb" + ) cec_inverters = cPickle.load(cec_inverters) - if type(self.plant_conf['pv_module_model']) == list: + if type(self.plant_conf["pv_module_model"]) == list: P_PV_forecast = pd.Series(0, index=df_weather.index) - for i in range(len(self.plant_conf['pv_module_model'])): + for i in range(len(self.plant_conf["pv_module_model"])): # Selecting correct module and inverter - module = cec_modules[self.plant_conf['pv_module_model'][i]] - inverter = cec_inverters[self.plant_conf['pv_inverter_model'][i]] + module = cec_modules[self.plant_conf["pv_module_model"][i]] + inverter = cec_inverters[ + self.plant_conf["pv_inverter_model"][i] + ] # Building the PV system in PVLib - system = PVSystem(surface_tilt=self.plant_conf['surface_tilt'][i], - surface_azimuth=self.plant_conf['surface_azimuth'][i], - module_parameters=module, - inverter_parameters=inverter, - temperature_model_parameters=temp_params, - 
modules_per_string=self.plant_conf['modules_per_string'][i], - strings_per_inverter=self.plant_conf['strings_per_inverter'][i]) + system = PVSystem( + surface_tilt=self.plant_conf["surface_tilt"][i], + surface_azimuth=self.plant_conf["surface_azimuth"][i], + module_parameters=module, + inverter_parameters=inverter, + temperature_model_parameters=temp_params, + modules_per_string=self.plant_conf["modules_per_string"][i], + strings_per_inverter=self.plant_conf[ + "strings_per_inverter" + ][i], + ) mc = ModelChain(system, location, aoi_model="physical") # Run the model on the weather DF indexes mc.run_model(df_weather) @@ -508,16 +697,18 @@ def get_power_from_weather(self, df_weather: pd.DataFrame, P_PV_forecast = P_PV_forecast + mc.results.ac else: # Selecting correct module and inverter - module = cec_modules[self.plant_conf['pv_module_model']] - inverter = cec_inverters[self.plant_conf['pv_inverter_model']] + module = cec_modules[self.plant_conf["pv_module_model"]] + inverter = cec_inverters[self.plant_conf["pv_inverter_model"]] # Building the PV system in PVLib - system = PVSystem(surface_tilt=self.plant_conf['surface_tilt'], - surface_azimuth=self.plant_conf['surface_azimuth'], - module_parameters=module, - inverter_parameters=inverter, - temperature_model_parameters=temp_params, - modules_per_string=self.plant_conf['modules_per_string'], - strings_per_inverter=self.plant_conf['strings_per_inverter']) + system = PVSystem( + surface_tilt=self.plant_conf["surface_tilt"], + surface_azimuth=self.plant_conf["surface_azimuth"], + module_parameters=module, + inverter_parameters=inverter, + temperature_model_parameters=temp_params, + modules_per_string=self.plant_conf["modules_per_string"], + strings_per_inverter=self.plant_conf["strings_per_inverter"], + ) mc = ModelChain(system, location, aoi_model="physical") # Run the model on the weather DF indexes mc.run_model(df_weather) @@ -525,47 +716,80 @@ def get_power_from_weather(self, df_weather: pd.DataFrame, P_PV_forecast = mc.results.ac if set_mix_forecast: P_PV_forecast = Forecast.get_mix_forecast( - df_now, P_PV_forecast, - self.params['passed_data']['alpha'], self.params['passed_data']['beta'], self.var_PV) + df_now, + P_PV_forecast, + self.params["passed_data"]["alpha"], + self.params["passed_data"]["beta"], + self.var_PV, + ) return P_PV_forecast - + def get_forecast_days_csv(self, timedelta_days: Optional[int] = 1) -> pd.date_range: r""" Get the date range vector of forecast dates that will be used when loading a CSV file. 
- + :return: The forecast dates vector :rtype: pd.date_range """ - start_forecast_csv = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0) - if self.method_ts_round == 'nearest': - start_forecast_csv = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0) - elif self.method_ts_round == 'first': - start_forecast_csv = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0).floor(freq=self.freq) - elif self.method_ts_round == 'last': - start_forecast_csv = pd.Timestamp(datetime.now(), tz=self.time_zone).replace(microsecond=0).ceil(freq=self.freq) + start_forecast_csv = pd.Timestamp(datetime.now(), tz=self.time_zone).replace( + microsecond=0 + ) + if self.method_ts_round == "nearest": + start_forecast_csv = pd.Timestamp( + datetime.now(), tz=self.time_zone + ).replace(microsecond=0) + elif self.method_ts_round == "first": + start_forecast_csv = ( + pd.Timestamp(datetime.now(), tz=self.time_zone) + .replace(microsecond=0) + .floor(freq=self.freq) + ) + elif self.method_ts_round == "last": + start_forecast_csv = ( + pd.Timestamp(datetime.now(), tz=self.time_zone) + .replace(microsecond=0) + .ceil(freq=self.freq) + ) else: self.logger.error("Wrong method_ts_round passed parameter") - end_forecast_csv = (start_forecast_csv + self.optim_conf['delta_forecast_daily']).replace(microsecond=0) - forecast_dates_csv = pd.date_range(start=start_forecast_csv, - end=end_forecast_csv+timedelta(days=timedelta_days)-self.freq, - freq=self.freq, tz=self.time_zone).tz_convert('utc').round(self.freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(self.time_zone) + end_forecast_csv = ( + start_forecast_csv + self.optim_conf["delta_forecast_daily"] + ).replace(microsecond=0) + forecast_dates_csv = ( + pd.date_range( + start=start_forecast_csv, + end=end_forecast_csv + timedelta(days=timedelta_days) - self.freq, + freq=self.freq, + tz=self.time_zone, + ) + .tz_convert("utc") + .round(self.freq, ambiguous="infer", nonexistent="shift_forward") + .tz_convert(self.time_zone) + ) if self.params is not None: - if 'prediction_horizon' in list(self.params['passed_data'].keys()): - if self.params['passed_data']['prediction_horizon'] is not None: - forecast_dates_csv = forecast_dates_csv[0:self.params['passed_data']['prediction_horizon']] + if "prediction_horizon" in list(self.params["passed_data"].keys()): + if self.params["passed_data"]["prediction_horizon"] is not None: + forecast_dates_csv = forecast_dates_csv[ + 0 : self.params["passed_data"]["prediction_horizon"] + ] return forecast_dates_csv - - def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dates_csv: pd.date_range, - csv_path: str, data_list: Optional[list] = None, - list_and_perfect: Optional[bool] = False) -> pd.DataFrame: + + def get_forecast_out_from_csv_or_list( + self, + df_final: pd.DataFrame, + forecast_dates_csv: pd.date_range, + csv_path: str, + data_list: Optional[list] = None, + list_and_perfect: Optional[bool] = False, + ) -> pd.DataFrame: r""" - Get the forecast data as a DataFrame from a CSV file. - - The data contained in the CSV file should be a 24h forecast with the same frequency as - the main 'optimization_time_step' parameter in the configuration file. The timestamp will not be used and + Get the forecast data as a DataFrame from a CSV file. + + The data contained in the CSV file should be a 24h forecast with the same frequency as + the main 'optimization_time_step' parameter in the configuration file. 
The timestamp will not be used and a new DateTimeIndex is generated to fit the timestamp index of the input data in 'df_final'. - + :param df_final: The DataFrame containing the input data. :type df_final: pd.DataFrame :param forecast_dates_csv: The forecast dates vector @@ -577,10 +801,10 @@ def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dat """ if csv_path is None: - data_dict = {'ts':forecast_dates_csv, 'yhat':data_list} + data_dict = {"ts": forecast_dates_csv, "yhat": data_list} df_csv = pd.DataFrame.from_dict(data_dict) df_csv.index = forecast_dates_csv - df_csv.drop(['ts'], axis=1, inplace=True) + df_csv.drop(["ts"], axis=1, inplace=True) df_csv = set_df_index_freq(df_csv) if list_and_perfect: days_list = df_final.index.day.unique().tolist() @@ -588,11 +812,11 @@ def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dat days_list = df_csv.index.day.unique().tolist() else: if not os.path.exists(csv_path): - csv_path = self.emhass_conf['data_path'] / csv_path - load_csv_file_path = csv_path - df_csv = pd.read_csv(load_csv_file_path, header=None, names=['ts', 'yhat']) + csv_path = self.emhass_conf["data_path"] / csv_path + load_csv_file_path = csv_path + df_csv = pd.read_csv(load_csv_file_path, header=None, names=["ts", "yhat"]) df_csv.index = forecast_dates_csv - df_csv.drop(['ts'], axis=1, inplace=True) + df_csv.drop(["ts"], axis=1, inplace=True) df_csv = set_df_index_freq(df_csv) days_list = df_final.index.day.unique().tolist() forecast_out = pd.DataFrame() @@ -606,47 +830,73 @@ def get_forecast_out_from_csv_or_list(self, df_final: pd.DataFrame, forecast_dat df_tmp = copy.deepcopy(df_final) first_elm_index = [i for i, x in enumerate(df_tmp.index.day == day) if x][0] last_elm_index = [i for i, x in enumerate(df_tmp.index.day == day) if x][-1] - fcst_index = pd.date_range(start=df_tmp.index[first_elm_index], - end=df_tmp.index[last_elm_index], - freq=df_tmp.index.freq) - first_hour = str(df_tmp.index[first_elm_index].hour)+":"+str(df_tmp.index[first_elm_index].minute) - last_hour = str(df_tmp.index[last_elm_index].hour)+":"+str(df_tmp.index[last_elm_index].minute) + fcst_index = pd.date_range( + start=df_tmp.index[first_elm_index], + end=df_tmp.index[last_elm_index], + freq=df_tmp.index.freq, + ) + first_hour = ( + str(df_tmp.index[first_elm_index].hour) + + ":" + + str(df_tmp.index[first_elm_index].minute) + ) + last_hour = ( + str(df_tmp.index[last_elm_index].hour) + + ":" + + str(df_tmp.index[last_elm_index].minute) + ) if len(forecast_out) == 0: if csv_path is None: if list_and_perfect: forecast_out = pd.DataFrame( df_csv.between_time(first_hour, last_hour).values, - index=fcst_index) + index=fcst_index, + ) else: forecast_out = pd.DataFrame( - df_csv.loc[fcst_index,:].between_time(first_hour, last_hour).values, - index=fcst_index) + df_csv.loc[fcst_index, :] + .between_time(first_hour, last_hour) + .values, + index=fcst_index, + ) else: forecast_out = pd.DataFrame( df_csv.between_time(first_hour, last_hour).values, - index=fcst_index) + index=fcst_index, + ) else: if csv_path is None: if list_and_perfect: forecast_tp = pd.DataFrame( df_csv.between_time(first_hour, last_hour).values, - index=fcst_index) + index=fcst_index, + ) else: forecast_tp = pd.DataFrame( - df_csv.loc[fcst_index,:].between_time(first_hour, last_hour).values, - index=fcst_index) + df_csv.loc[fcst_index, :] + .between_time(first_hour, last_hour) + .values, + index=fcst_index, + ) else: forecast_tp = pd.DataFrame( df_csv.between_time(first_hour, 
last_hour).values, - index=fcst_index) + index=fcst_index, + ) forecast_out = pd.concat([forecast_out, forecast_tp], axis=0) return forecast_out - - def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: Optional[str] = 'naive', - csv_path: Optional[str] = "data_load_forecast.csv", - set_mix_forecast:Optional[bool] = False, df_now:Optional[pd.DataFrame] = pd.DataFrame(), - use_last_window: Optional[bool] = True, mlf: Optional[MLForecaster] = None, - debug: Optional[bool] = False) -> pd.Series: + + def get_load_forecast( + self, + days_min_load_forecast: Optional[int] = 3, + method: Optional[str] = "naive", + csv_path: Optional[str] = "data_load_forecast.csv", + set_mix_forecast: Optional[bool] = False, + df_now: Optional[pd.DataFrame] = pd.DataFrame(), + use_last_window: Optional[bool] = True, + mlf: Optional[MLForecaster] = None, + debug: Optional[bool] = False, + ) -> pd.Series: r""" Get and generate the load forecast data. @@ -681,118 +931,165 @@ def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: O :rtype: pd.DataFrame """ - csv_path = self.emhass_conf['data_path'] / csv_path - - if method == 'naive' or method == 'mlforecaster': # retrieving needed data for these methods - self.logger.info("Retrieving data from hass for load forecast using method = "+method) + csv_path = self.emhass_conf["data_path"] / csv_path + + if ( + method == "naive" or method == "mlforecaster" + ): # retrieving needed data for these methods + self.logger.info( + "Retrieving data from hass for load forecast using method = " + method + ) var_list = [self.var_load] var_replace_zero = None var_interp = [self.var_load] time_zone_load_foreacast = None # We will need to retrieve a new set of load data according to the days_min_load_forecast parameter - rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.freq, time_zone_load_foreacast, self.params, self.emhass_conf, self.logger) + rh = RetrieveHass( + self.retrieve_hass_conf["hass_url"], + self.retrieve_hass_conf["long_lived_token"], + self.freq, + time_zone_load_foreacast, + self.params, + self.emhass_conf, + self.logger, + ) if self.get_data_from_file: - filename_path = self.emhass_conf['data_path'] / 'test_df_final.pkl' - with open(filename_path, 'rb') as inp: + filename_path = self.emhass_conf["data_path"] / "test_df_final.pkl" + with open(filename_path, "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) self.var_load = var_list[0] - self.retrieve_hass_conf['sensor_power_load_no_var_loads'] = self.var_load + self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = ( + self.var_load + ) var_interp = [var_list[0]] self.var_list = [var_list[0]] - self.var_load_new = self.var_load+'_positive' + self.var_load_new = self.var_load + "_positive" else: - days_list = get_days_list(days_min_load_forecast) + days_list = get_days_list(days_min_load_forecast) if not rh.get_data(days_list, var_list): return False - if not rh.prepare_data( - self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'], - set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = var_replace_zero, var_interp = var_interp): + if not rh.prepare_data( + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=self.retrieve_hass_conf["load_negative"], + set_zero_min=self.retrieve_hass_conf["set_zero_min"], + var_replace_zero=var_replace_zero, + var_interp=var_interp, + ): return False df = 
rh.df_final.copy()[[self.var_load_new]] - if method == 'naive': # using a naive approach - mask_forecast_out = (df.index > days_list[-1] - self.optim_conf['delta_forecast_daily']) + if method == "naive": # using a naive approach + mask_forecast_out = ( + df.index > days_list[-1] - self.optim_conf["delta_forecast_daily"] + ) forecast_out = df.copy().loc[mask_forecast_out] - forecast_out = forecast_out.rename(columns={self.var_load_new: 'yhat'}) + forecast_out = forecast_out.rename(columns={self.var_load_new: "yhat"}) # Force forecast_out length to avoid mismatches - forecast_out = forecast_out.iloc[0:len(self.forecast_dates)] + forecast_out = forecast_out.iloc[0 : len(self.forecast_dates)] forecast_out.index = self.forecast_dates - elif method == 'mlforecaster': # using a custom forecast model with machine learning + elif ( + method == "mlforecaster" + ): # using a custom forecast model with machine learning # Load model - model_type = self.params['passed_data']['model_type'] - filename = model_type+'_mlf.pkl' - filename_path = self.emhass_conf['data_path'] / filename + model_type = self.params["passed_data"]["model_type"] + filename = model_type + "_mlf.pkl" + filename_path = self.emhass_conf["data_path"] / filename if not debug: if filename_path.is_file(): - with open(filename_path, 'rb') as inp: + with open(filename_path, "rb") as inp: mlf = pickle.load(inp) else: - self.logger.error("The ML forecaster file was not found, please run a model fit method before this predict method") + self.logger.error( + "The ML forecaster file was not found, please run a model fit method before this predict method" + ) return False # Make predictions if use_last_window: data_last_window = copy.deepcopy(df) - data_last_window = data_last_window.rename(columns={self.var_load_new: self.var_load}) + data_last_window = data_last_window.rename( + columns={self.var_load_new: self.var_load} + ) else: data_last_window = None forecast_out = mlf.predict(data_last_window) # Force forecast length to avoid mismatches - self.logger.debug("Number of ML predict forcast data generated (lags_opt): " + str(len(forecast_out.index))) - self.logger.debug("Number of forcast dates obtained: " + str(len(self.forecast_dates))) + self.logger.debug( + "Number of ML predict forcast data generated (lags_opt): " + + str(len(forecast_out.index)) + ) + self.logger.debug( + "Number of forcast dates obtained: " + str(len(self.forecast_dates)) + ) if len(self.forecast_dates) < len(forecast_out.index): - forecast_out = forecast_out.iloc[0:len(self.forecast_dates)] + forecast_out = forecast_out.iloc[0 : len(self.forecast_dates)] # To be removed once bug is fixed elif len(self.forecast_dates) > len(forecast_out.index): - self.logger.error("Unable to obtain: " + str(len(self.forecast_dates)) + " lags_opt values from sensor: power load no var loads, check optimization_time_step/freq and historic_days_to_retrieve/days_to_retrieve parameters") + self.logger.error( + "Unable to obtain: " + + str(len(self.forecast_dates)) + + " lags_opt values from sensor: power load no var loads, check optimization_time_step/freq and historic_days_to_retrieve/days_to_retrieve parameters" + ) return False # Define DataFrame - data_dict = {'ts':self.forecast_dates, 'yhat':forecast_out.values.tolist()} + data_dict = { + "ts": self.forecast_dates, + "yhat": forecast_out.values.tolist(), + } data = pd.DataFrame.from_dict(data_dict) # Define index - data.set_index('ts', inplace=True) + data.set_index("ts", inplace=True) forecast_out = data.copy().loc[self.forecast_dates] 
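# --- Editor's aside (illustrative only, not part of the patch) ---------------
# When set_mix_forecast=True, the forecast built in this function is corrected
# at the end by Forecast.get_mix_forecast(), which blends the first forecasted
# value with the last measured one. A minimal sketch of that blending, with
# made-up numbers and hypothetical variable names:
#
#     import pandas as pd
#     alpha, beta = 0.5, 0.5
#     forecast = pd.Series([1000.0, 1100.0, 1200.0])   # plays the role of df_forecast
#     last_measured = 900.0                            # plays the role of df_now[col].iloc[-1]
#     forecast.iloc[0] = alpha * forecast.iloc[0] + beta * last_measured
#     # forecast.iloc[0] is now 950.0; the remaining values are left untouched
# ------------------------------------------------------------------------------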
- elif method == 'csv': # reading from a csv file + elif method == "csv": # reading from a csv file load_csv_file_path = csv_path - df_csv = pd.read_csv(load_csv_file_path, header=None, names=['ts', 'yhat']) + df_csv = pd.read_csv(load_csv_file_path, header=None, names=["ts", "yhat"]) if len(df_csv) < len(self.forecast_dates): self.logger.error("Passed data from CSV is not long enough") else: # Ensure correct length - df_csv = df_csv.loc[df_csv.index[0:len(self.forecast_dates)],:] + df_csv = df_csv.loc[df_csv.index[0 : len(self.forecast_dates)], :] # Define index df_csv.index = self.forecast_dates - df_csv.drop(['ts'], axis=1, inplace=True) + df_csv.drop(["ts"], axis=1, inplace=True) forecast_out = df_csv.copy().loc[self.forecast_dates] - elif method == 'list': # reading a list of values + elif method == "list": # reading a list of values # Loading data from passed list - data_list = self.params['passed_data']['load_power_forecast'] + data_list = self.params["passed_data"]["load_power_forecast"] # Check if the passed data has the correct length - if len(data_list) < len(self.forecast_dates) and self.params['passed_data']['prediction_horizon'] is None: + if ( + len(data_list) < len(self.forecast_dates) + and self.params["passed_data"]["prediction_horizon"] is None + ): self.logger.error("Passed data from passed list is not long enough") return False else: # Ensure correct length - data_list = data_list[0:len(self.forecast_dates)] + data_list = data_list[0 : len(self.forecast_dates)] # Define DataFrame - data_dict = {'ts':self.forecast_dates, 'yhat':data_list} + data_dict = {"ts": self.forecast_dates, "yhat": data_list} data = pd.DataFrame.from_dict(data_dict) # Define index - data.set_index('ts', inplace=True) + data.set_index("ts", inplace=True) forecast_out = data.copy().loc[self.forecast_dates] else: self.logger.error("Passed method is not valid") return False - P_Load_forecast = copy.deepcopy(forecast_out['yhat']) + P_Load_forecast = copy.deepcopy(forecast_out["yhat"]) if set_mix_forecast: P_Load_forecast = Forecast.get_mix_forecast( - df_now, P_Load_forecast, - self.params['passed_data']['alpha'], self.params['passed_data']['beta'], self.var_load_new) + df_now, + P_Load_forecast, + self.params["passed_data"]["alpha"], + self.params["passed_data"]["beta"], + self.var_load_new, + ) return P_Load_forecast - - def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'hp_hc_periods', - csv_path: Optional[str] = "data_load_cost_forecast.csv", - list_and_perfect: Optional[bool] = False) -> pd.DataFrame: + + def get_load_cost_forecast( + self, + df_final: pd.DataFrame, + method: Optional[str] = "hp_hc_periods", + csv_path: Optional[str] = "data_load_cost_forecast.csv", + list_and_perfect: Optional[bool] = False, + ) -> pd.DataFrame: r""" Get the unit cost for the load consumption based on multiple tariff \ periods. 
This is the cost of the energy from the utility in a vector \ @@ -812,45 +1109,62 @@ def get_load_cost_forecast(self, df_final: pd.DataFrame, method: Optional[str] = :rtype: pd.DataFrame """ - csv_path = self.emhass_conf['data_path'] / csv_path - if method == 'hp_hc_periods': - df_final[self.var_load_cost] = self.optim_conf['load_offpeak_hours_cost'] + csv_path = self.emhass_conf["data_path"] / csv_path + if method == "hp_hc_periods": + df_final[self.var_load_cost] = self.optim_conf["load_offpeak_hours_cost"] list_df_hp = [] - for key, period_hp in self.optim_conf['load_peak_hour_periods'].items(): - list_df_hp.append(df_final[self.var_load_cost].between_time( - period_hp[0]['start'], period_hp[1]['end'])) + for key, period_hp in self.optim_conf["load_peak_hour_periods"].items(): + list_df_hp.append( + df_final[self.var_load_cost].between_time( + period_hp[0]["start"], period_hp[1]["end"] + ) + ) for df_hp in list_df_hp: - df_final.loc[df_hp.index, self.var_load_cost] = self.optim_conf['load_peak_hours_cost'] - elif method == 'csv': + df_final.loc[df_hp.index, self.var_load_cost] = self.optim_conf[ + "load_peak_hours_cost" + ] + elif method == "csv": forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0) forecast_out = self.get_forecast_out_from_csv_or_list( - df_final, forecast_dates_csv, csv_path) + df_final, forecast_dates_csv, csv_path + ) df_final[self.var_load_cost] = forecast_out - elif method == 'list': # reading a list of values + elif method == "list": # reading a list of values # Loading data from passed list - data_list = self.params['passed_data']['load_cost_forecast'] + data_list = self.params["passed_data"]["load_cost_forecast"] # Check if the passed data has the correct length - if len(data_list) < len(self.forecast_dates) and self.params['passed_data']['prediction_horizon'] is None: + if ( + len(data_list) < len(self.forecast_dates) + and self.params["passed_data"]["prediction_horizon"] is None + ): self.logger.error("Passed data from passed list is not long enough") return False else: # Ensure correct length - data_list = data_list[0:len(self.forecast_dates)] + data_list = data_list[0 : len(self.forecast_dates)] # Define the correct dates forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0) forecast_out = self.get_forecast_out_from_csv_or_list( - df_final, forecast_dates_csv, None, data_list=data_list, list_and_perfect=list_and_perfect) + df_final, + forecast_dates_csv, + None, + data_list=data_list, + list_and_perfect=list_and_perfect, + ) # Fill the final DF df_final[self.var_load_cost] = forecast_out else: self.logger.error("Passed method is not valid") return False return df_final - - def get_prod_price_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'constant', - csv_path: Optional[str] = "data_prod_price_forecast.csv", - list_and_perfect: Optional[bool] = False) -> pd.DataFrame: + def get_prod_price_forecast( + self, + df_final: pd.DataFrame, + method: Optional[str] = "constant", + csv_path: Optional[str] = "data_prod_price_forecast.csv", + list_and_perfect: Optional[bool] = False, + ) -> pd.DataFrame: r""" Get the unit power production price for the energy injected to the grid.\ This is the price of the energy injected to the utility in a vector \ @@ -871,31 +1185,42 @@ def get_prod_price_forecast(self, df_final: pd.DataFrame, method: Optional[str] :rtype: pd.DataFrame """ - csv_path = self.emhass_conf['data_path'] / csv_path - if method == 'constant': - df_final[self.var_prod_price] = 
self.optim_conf['photovoltaic_production_sell_price'] - elif method == 'csv': + csv_path = self.emhass_conf["data_path"] / csv_path + if method == "constant": + df_final[self.var_prod_price] = self.optim_conf[ + "photovoltaic_production_sell_price" + ] + elif method == "csv": forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0) forecast_out = self.get_forecast_out_from_csv_or_list( - df_final, forecast_dates_csv, csv_path) + df_final, forecast_dates_csv, csv_path + ) df_final[self.var_prod_price] = forecast_out - elif method == 'list': # reading a list of values + elif method == "list": # reading a list of values # Loading data from passed list - data_list = self.params['passed_data']['prod_price_forecast'] + data_list = self.params["passed_data"]["prod_price_forecast"] # Check if the passed data has the correct length - if len(data_list) < len(self.forecast_dates) and self.params['passed_data']['prediction_horizon'] is None: + if ( + len(data_list) < len(self.forecast_dates) + and self.params["passed_data"]["prediction_horizon"] is None + ): self.logger.error("Passed data from passed list is not long enough") return False else: # Ensure correct length - data_list = data_list[0:len(self.forecast_dates)] + data_list = data_list[0 : len(self.forecast_dates)] # Define the correct dates forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0) forecast_out = self.get_forecast_out_from_csv_or_list( - df_final, forecast_dates_csv, None, data_list=data_list, list_and_perfect=list_and_perfect) + df_final, + forecast_dates_csv, + None, + data_list=data_list, + list_and_perfect=list_and_perfect, + ) # Fill the final DF df_final[self.var_prod_price] = forecast_out else: self.logger.error("Passed method is not valid") return False - return df_final \ No newline at end of file + return df_final diff --git a/src/emhass/machine_learning_forecaster.py b/src/emhass/machine_learning_forecaster.py index 1863f495..331b015d 100644 --- a/src/emhass/machine_learning_forecaster.py +++ b/src/emhass/machine_learning_forecaster.py @@ -17,8 +17,10 @@ from skforecast.model_selection import bayesian_search_forecaster, backtesting_forecaster, TimeSeriesFold import warnings + warnings.filterwarnings("ignore", category=DeprecationWarning) + class MLForecaster: r""" A forecaster class using machine learning models with auto-regressive approach and features\ @@ -36,8 +38,16 @@ class MLForecaster: """ - def __init__(self, data: pd.DataFrame, model_type: str, var_model: str, sklearn_model: str, - num_lags: int, emhass_conf: dict, logger: logging.Logger) -> None: + def __init__( + self, + data: pd.DataFrame, + model_type: str, + var_model: str, + sklearn_model: str, + num_lags: int, + emhass_conf: dict, + logger: logging.Logger, + ) -> None: r"""Define constructor for the forecast class. 
:param data: The data that will be used for train/test @@ -71,8 +81,8 @@ def __init__(self, data: pd.DataFrame, model_type: str, var_model: str, sklearn_ # A quick data preparation self.data.index = pd.to_datetime(self.data.index) self.data.sort_index(inplace=True) - self.data = self.data[~self.data.index.duplicated(keep='first')] - + self.data = self.data[~self.data.index.duplicated(keep="first")] + @staticmethod def add_date_features(data: pd.DataFrame) -> pd.DataFrame: """Add date features from the input DataFrame timestamp @@ -83,32 +93,36 @@ def add_date_features(data: pd.DataFrame) -> pd.DataFrame: :rtype: pd.DataFrame """ df = copy.deepcopy(data) - df['year'] = [i.year for i in df.index] - df['month'] = [i.month for i in df.index] - df['day_of_week'] = [i.dayofweek for i in df.index] - df['day_of_year'] = [i.dayofyear for i in df.index] - df['day'] = [i.day for i in df.index] - df['hour'] = [i.hour for i in df.index] + df["year"] = [i.year for i in df.index] + df["month"] = [i.month for i in df.index] + df["day_of_week"] = [i.dayofweek for i in df.index] + df["day_of_year"] = [i.dayofyear for i in df.index] + df["day"] = [i.day for i in df.index] + df["hour"] = [i.hour for i in df.index] return df @staticmethod def neg_r2_score(y_true, y_pred): """The negative of the r2 score.""" return -r2_score(y_true, y_pred) - + @staticmethod def generate_exog(data_last_window, periods, var_name): """Generate the exogenous data for future timestamps.""" - forecast_dates = pd.date_range(start=data_last_window.index[-1]+data_last_window.index.freq, - periods=periods, - freq=data_last_window.index.freq) - exog = pd.DataFrame({var_name:[np.nan]*periods}, - index=forecast_dates) + forecast_dates = pd.date_range( + start=data_last_window.index[-1] + data_last_window.index.freq, + periods=periods, + freq=data_last_window.index.freq, + ) + exog = pd.DataFrame({var_name: [np.nan] * periods}, index=forecast_dates) exog = MLForecaster.add_date_features(exog) return exog - - def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optional[bool] = False - ) -> Tuple[pd.DataFrame, pd.DataFrame]: + + def fit( + self, + split_date_delta: Optional[str] = "48h", + perform_backtest: Optional[bool] = False, + ) -> Tuple[pd.DataFrame, pd.DataFrame]: r"""The fit method to train the ML model. 
:param split_date_delta: The delta from now to `split_date_delta` that will be used \ @@ -120,27 +134,39 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona :return: The DataFrame containing the forecast data results without and with backtest :rtype: Tuple[pd.DataFrame, pd.DataFrame] """ - self.logger.info("Performing a forecast model fit for "+self.model_type) + self.logger.info("Performing a forecast model fit for " + self.model_type) # Preparing the data: adding exogenous features self.data_exo = pd.DataFrame(index=self.data.index) self.data_exo = MLForecaster.add_date_features(self.data_exo) self.data_exo[self.var_model] = self.data[self.var_model] - self.data_exo = self.data_exo.interpolate(method='linear', axis=0, limit=None) + self.data_exo = self.data_exo.interpolate(method="linear", axis=0, limit=None) # train/test split - self.date_train = self.data_exo.index[-1]-pd.Timedelta('5days')+self.data_exo.index.freq # The last 5 days - self.date_split = self.data_exo.index[-1]-pd.Timedelta(split_date_delta)+self.data_exo.index.freq # The last 48h - self.data_train = self.data_exo.loc[:self.date_split-self.data_exo.index.freq,:] - self.data_test = self.data_exo.loc[self.date_split:,:] + self.date_train = ( + self.data_exo.index[-1] - pd.Timedelta("5days") + self.data_exo.index.freq + ) # The last 5 days + self.date_split = ( + self.data_exo.index[-1] + - pd.Timedelta(split_date_delta) + + self.data_exo.index.freq + ) # The last 48h + self.data_train = self.data_exo.loc[ + : self.date_split - self.data_exo.index.freq, : + ] + self.data_test = self.data_exo.loc[self.date_split :, :] self.steps = len(self.data_test) # Pick correct sklearn model - if self.sklearn_model == 'LinearRegression': + if self.sklearn_model == "LinearRegression": base_model = LinearRegression() - elif self.sklearn_model == 'ElasticNet': + elif self.sklearn_model == "ElasticNet": base_model = ElasticNet() - elif self.sklearn_model == 'KNeighborsRegressor': + elif self.sklearn_model == "KNeighborsRegressor": base_model = KNeighborsRegressor() else: - self.logger.error("Passed sklearn model "+self.sklearn_model+" is not valid. Defaulting to KNeighborsRegressor") + self.logger.error( + "Passed sklearn model " + + self.sklearn_model + + " is not valid. 
Defaulting to KNeighborsRegressor" + ) base_model = KNeighborsRegressor() # Define the forecaster object self.forecaster = ForecasterRecursive( @@ -148,20 +174,28 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona lags = self.num_lags ) # Fit and time it - self.logger.info("Training a "+self.sklearn_model+" model") + self.logger.info("Training a " + self.sklearn_model + " model") start_time = time.time() - self.forecaster.fit(y=self.data_train[self.var_model], - exog=self.data_train.drop(self.var_model, axis=1)) + self.forecaster.fit( + y=self.data_train[self.var_model], + exog=self.data_train.drop(self.var_model, axis=1), + ) self.logger.info(f"Elapsed time for model fit: {time.time() - start_time}") # Make a prediction to print metrics - predictions = self.forecaster.predict(steps=self.steps, exog=self.data_test.drop(self.var_model, axis=1)) - pred_metric = r2_score(self.data_test[self.var_model],predictions) - self.logger.info(f"Prediction R2 score of fitted model on test data: {pred_metric}") + predictions = self.forecaster.predict( + steps=self.steps, exog=self.data_test.drop(self.var_model, axis=1) + ) + pred_metric = r2_score(self.data_test[self.var_model], predictions) + self.logger.info( + f"Prediction R2 score of fitted model on test data: {pred_metric}" + ) # Packing results in a DataFrame - df_pred = pd.DataFrame(index=self.data_exo.index,columns=['train','test','pred']) - df_pred['train'] = self.data_train[self.var_model] - df_pred['test'] = self.data_test[self.var_model] - df_pred['pred'] = predictions + df_pred = pd.DataFrame( + index=self.data_exo.index, columns=["train", "test", "pred"] + ) + df_pred["train"] = self.data_train[self.var_model] + df_pred["test"] = self.data_test[self.var_model] + df_pred["pred"] = predictions df_pred_backtest = None if perform_backtest is True: # Using backtesting tool to evaluate the model @@ -186,13 +220,14 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona ) self.logger.info(f"Elapsed backtesting time: {time.time() - start_time}") self.logger.info(f"Backtest R2 score: {-metric}") - df_pred_backtest = pd.DataFrame(index=self.data_exo.index,columns=['train','pred']) - df_pred_backtest['train'] = self.data_exo[self.var_model] - df_pred_backtest['pred'] = predictions_backtest + df_pred_backtest = pd.DataFrame( + index=self.data_exo.index, columns=["train", "pred"] + ) + df_pred_backtest["train"] = self.data_exo[self.var_model] + df_pred_backtest["pred"] = predictions_backtest return df_pred, df_pred_backtest - - def predict(self, data_last_window: Optional[pd.DataFrame] = None - ) -> pd.Series: + + def predict(self, data_last_window: Optional[pd.DataFrame] = None) -> pd.Series: """The predict method to generate forecasts from a previously fitted ML model. 
:param data_last_window: The data that will be used to generate the new forecast, this \ @@ -204,21 +239,33 @@ def predict(self, data_last_window: Optional[pd.DataFrame] = None :rtype: pd.Series """ if data_last_window is None: - predictions = self.forecaster.predict(steps=self.num_lags, exog=self.data_test.drop(self.var_model, axis=1)) + predictions = self.forecaster.predict( + steps=self.num_lags, exog=self.data_test.drop(self.var_model, axis=1) + ) else: - data_last_window = data_last_window.interpolate(method='linear', axis=0, limit=None) + data_last_window = data_last_window.interpolate( + method="linear", axis=0, limit=None + ) if self.is_tuned: - exog = MLForecaster.generate_exog(data_last_window, self.lags_opt, self.var_model) - predictions = self.forecaster.predict(steps=self.lags_opt, - last_window=data_last_window[self.var_model], - exog=exog.drop(self.var_model, axis=1)) + exog = MLForecaster.generate_exog( + data_last_window, self.lags_opt, self.var_model + ) + predictions = self.forecaster.predict( + steps=self.lags_opt, + last_window=data_last_window[self.var_model], + exog=exog.drop(self.var_model, axis=1), + ) else: - exog = MLForecaster.generate_exog(data_last_window, self.num_lags, self.var_model) - predictions = self.forecaster.predict(steps=self.num_lags, - last_window=data_last_window[self.var_model], - exog=exog.drop(self.var_model, axis=1)) + exog = MLForecaster.generate_exog( + data_last_window, self.num_lags, self.var_model + ) + predictions = self.forecaster.predict( + steps=self.num_lags, + last_window=data_last_window[self.var_model], + exog=exog.drop(self.var_model, axis=1), + ) return predictions - + def tune(self, debug: Optional[bool] = False) -> pd.DataFrame: """Tuning a previously fitted model using bayesian optimization. 
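A minimal usage sketch of the MLForecaster workflow touched by the hunks above and below. The synthetic DataFrame, the "load_forecast" label, the sensor name and the empty emhass_conf dict are placeholders for illustration; in emhass the data normally comes from RetrieveHass:

    import logging

    import numpy as np
    import pandas as pd

    from emhass.machine_learning_forecaster import MLForecaster

    logger = logging.getLogger(__name__)
    # Ten days of a synthetic daily load profile at a 30 min time step.
    index = pd.date_range("2024-01-01", periods=10 * 48, freq="30min", tz="UTC")
    data = pd.DataFrame(
        {"sensor.power_load_no_var_loads": 500 + 200 * np.sin(2 * np.pi * np.arange(len(index)) / 48)},
        index=index,
    )
    mlf = MLForecaster(
        data, "load_forecast", "sensor.power_load_no_var_loads",
        "KNeighborsRegressor", 48, {}, logger,  # {} stands in for emhass_conf
    )
    df_pred, df_pred_backtest = mlf.fit(split_date_delta="48h", perform_backtest=False)
    predictions = mlf.predict()  # same call path as get_load_forecast with use_last_window=False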
@@ -228,43 +275,76 @@ def tune(self, debug: Optional[bool] = False) -> pd.DataFrame: :rtype: pd.DataFrame """ # Regressor hyperparameters search space - if self.sklearn_model == 'LinearRegression': + if self.sklearn_model == "LinearRegression": if debug: + def search_space(trial): - search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True]), - 'lags': trial.suggest_categorical('lags', [3])} + search_space = { + "fit_intercept": trial.suggest_categorical( + "fit_intercept", [True] + ), + "lags": trial.suggest_categorical("lags", [3]), + } return search_space else: + def search_space(trial): - search_space = {'fit_intercept': trial.suggest_categorical('fit_intercept', [True, False]), - 'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72])} + search_space = { + "fit_intercept": trial.suggest_categorical( + "fit_intercept", [True, False] + ), + "lags": trial.suggest_categorical( + "lags", [6, 12, 24, 36, 48, 60, 72] + ), + } return search_space - elif self.sklearn_model == 'ElasticNet': + elif self.sklearn_model == "ElasticNet": if debug: + def search_space(trial): - search_space = {'selection': trial.suggest_categorical('selection', ['random']), - 'lags': trial.suggest_categorical('lags', [3])} + search_space = { + "selection": trial.suggest_categorical("selection", ["random"]), + "lags": trial.suggest_categorical("lags", [3]), + } return search_space else: + def search_space(trial): - search_space = {'alpha': trial.suggest_float('alpha', 0.0, 2.0), - 'l1_ratio': trial.suggest_float('l1_ratio', 0.0, 1.0), - 'selection': trial.suggest_categorical('selection', ['cyclic', 'random']), - 'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72])} + search_space = { + "alpha": trial.suggest_float("alpha", 0.0, 2.0), + "l1_ratio": trial.suggest_float("l1_ratio", 0.0, 1.0), + "selection": trial.suggest_categorical( + "selection", ["cyclic", "random"] + ), + "lags": trial.suggest_categorical( + "lags", [6, 12, 24, 36, 48, 60, 72] + ), + } return search_space - elif self.sklearn_model == 'KNeighborsRegressor': + elif self.sklearn_model == "KNeighborsRegressor": if debug: + def search_space(trial): - search_space = {'weights': trial.suggest_categorical('weights', ['uniform']), - 'lags': trial.suggest_categorical('lags', [3])} + search_space = { + "weights": trial.suggest_categorical("weights", ["uniform"]), + "lags": trial.suggest_categorical("lags", [3]), + } return search_space else: + def search_space(trial): - search_space = {'n_neighbors': trial.suggest_int('n_neighbors', 2, 20), - 'leaf_size': trial.suggest_int('leaf_size', 20, 40), - 'weights': trial.suggest_categorical('weights', ['uniform', 'distance']), - 'lags': trial.suggest_categorical('lags', [6, 12, 24, 36, 48, 60, 72])} + search_space = { + "n_neighbors": trial.suggest_int("n_neighbors", 2, 20), + "leaf_size": trial.suggest_int("leaf_size", 20, 40), + "weights": trial.suggest_categorical( + "weights", ["uniform", "distance"] + ), + "lags": trial.suggest_categorical( + "lags", [6, 12, 24, 36, 48, 60, 72] + ), + } return search_space + # Bayesian search hyperparameter and lags with skforecast/optuna # Lags used as predictors if debug: @@ -298,18 +378,28 @@ def search_space(trial): ) self.logger.info(f"Elapsed time: {time.time() - start_time}") self.is_tuned = True - predictions_opt = self.forecaster.predict(steps=self.num_lags, exog=self.data_test.drop(self.var_model, axis=1)) - freq_hours = self.data_exo.index.freq.delta.seconds/3600 - self.lags_opt = 
int(np.round(len(self.optimize_results.iloc[0]['lags']))) - self.days_needed = int(np.round(self.lags_opt*freq_hours/24)) - df_pred_opt = pd.DataFrame(index=self.data_exo.index,columns=['train','test','pred_optim']) - df_pred_opt['train'] = self.data_train[self.var_model] - df_pred_opt['test'] = self.data_test[self.var_model] - df_pred_opt['pred_optim'] = predictions_opt - pred_optim_metric_train = -self.optimize_results.iloc[0]['neg_r2_score'] - self.logger.info(f"R2 score for optimized prediction in train period: {pred_optim_metric_train}") - pred_optim_metric_test = r2_score(df_pred_opt.loc[predictions_opt.index,'test'], - df_pred_opt.loc[predictions_opt.index,'pred_optim']) - self.logger.info(f"R2 score for optimized prediction in test period: {pred_optim_metric_test}") - self.logger.info("Number of optimal lags obtained: "+str(self.lags_opt)) + predictions_opt = self.forecaster.predict( + steps=self.num_lags, exog=self.data_test.drop(self.var_model, axis=1) + ) + freq_hours = self.data_exo.index.freq.delta.seconds / 3600 + self.lags_opt = int(np.round(len(self.optimize_results.iloc[0]["lags"]))) + self.days_needed = int(np.round(self.lags_opt * freq_hours / 24)) + df_pred_opt = pd.DataFrame( + index=self.data_exo.index, columns=["train", "test", "pred_optim"] + ) + df_pred_opt["train"] = self.data_train[self.var_model] + df_pred_opt["test"] = self.data_test[self.var_model] + df_pred_opt["pred_optim"] = predictions_opt + pred_optim_metric_train = -self.optimize_results.iloc[0]["neg_r2_score"] + self.logger.info( + f"R2 score for optimized prediction in train period: {pred_optim_metric_train}" + ) + pred_optim_metric_test = r2_score( + df_pred_opt.loc[predictions_opt.index, "test"], + df_pred_opt.loc[predictions_opt.index, "pred_optim"], + ) + self.logger.info( + f"R2 score for optimized prediction in test period: {pred_optim_metric_test}" + ) + self.logger.info("Number of optimal lags obtained: " + str(self.lags_opt)) return df_pred_opt diff --git a/src/emhass/machine_learning_regressor.py b/src/emhass/machine_learning_regressor.py index 9e443c6b..fad438b0 100644 --- a/src/emhass/machine_learning_regressor.py +++ b/src/emhass/machine_learning_regressor.py @@ -76,8 +76,16 @@ class MLRegressor: """ - def __init__(self: MLRegressor, data: pd.DataFrame, model_type: str, regression_model: str, - features: list, target: str, timestamp: str, logger: logging.Logger) -> None: + def __init__( + self: MLRegressor, + data: pd.DataFrame, + model_type: str, + regression_model: str, + features: list, + target: str, + timestamp: str, + logger: logging.Logger, + ) -> None: r"""Define constructor for the forecast class. :param data: The data that will be used for train/test @@ -116,7 +124,9 @@ def __init__(self: MLRegressor, data: pd.DataFrame, model_type: str, regression_ self.grid_search = None @staticmethod - def add_date_features(data: pd.DataFrame, date_features: list, timestamp: str) -> pd.DataFrame: + def add_date_features( + data: pd.DataFrame, date_features: list, timestamp: str + ) -> pd.DataFrame: """Add date features from the input DataFrame timestamp. 
:param data: The input DataFrame @@ -215,16 +225,25 @@ def fit(self: MLRegressor, date_features: list | None = None) -> bool: self.data_exo = self.data_exo.drop(self.target, axis=1) if self.timestamp is not None: self.data_exo = self.data_exo.drop(self.timestamp, axis=1) - X = self.data_exo - X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) + X = self.data_exo + X_train, X_test, y_train, y_test = train_test_split( + X, y, test_size=0.2, random_state=42 + ) self.steps = len(X_test) base_model, param_grid = self.get_regression_model() if base_model is None: return False self.model = make_pipeline(StandardScaler(), base_model) # Create a grid search object - self.grid_search = GridSearchCV(self.model, param_grid, cv=5, scoring="neg_mean_squared_error", - refit=True, verbose=0, n_jobs=-1) + self.grid_search = GridSearchCV( + self.model, + param_grid, + cv=5, + scoring="neg_mean_squared_error", + refit=True, + verbose=0, + n_jobs=-1, + ) # Fit the grid search object to the data self.logger.info("Training a %s model", self.regression_model) start_time = time.time() diff --git a/src/emhass/optimization.py b/src/emhass/optimization.py index 1af1cf00..3f58bfd4 100644 --- a/src/emhass/optimization.py +++ b/src/emhass/optimization.py @@ -1,17 +1,18 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import logging +import bz2 import copy +import logging import pathlib -import bz2 import pickle as cPickle +from math import ceil from typing import Optional, Tuple -import pandas as pd + import numpy as np +import pandas as pd import pulp as plp -from pulp import PULP_CBC_CMD, COIN_CMD, GLPK_CMD -from math import ceil +from pulp import COIN_CMD, GLPK_CMD, PULP_CBC_CMD class Optimization: @@ -32,10 +33,18 @@ class Optimization: """ - def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict, - var_load_cost: str, var_prod_price: str, - costfun: str, emhass_conf: dict, logger: logging.Logger, - opt_time_delta: Optional[int] = 24) -> None: + def __init__( + self, + retrieve_hass_conf: dict, + optim_conf: dict, + plant_conf: dict, + var_load_cost: str, + var_prod_price: str, + costfun: str, + emhass_conf: dict, + logger: logging.Logger, + opt_time_delta: Optional[int] = 24, + ) -> None: r""" Define constructor for Optimization class. 
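For context, a short sketch of how the refactored Optimization constructor below is instantiated. The configuration dicts are placeholders for what emhass builds from its configuration files, and the cost/price column names are assumptions, not taken from this patch:

    import logging

    from emhass.optimization import Optimization

    logger = logging.getLogger(__name__)
    # retrieve_hass_conf, optim_conf, plant_conf and emhass_conf are placeholders for
    # the configuration dicts that emhass parses from its configuration files.
    opt = Optimization(
        retrieve_hass_conf, optim_conf, plant_conf,
        "unit_load_cost", "unit_prod_price",  # assumed cost/price column names
        "profit",                             # one of the cost functions handled below
        emhass_conf, logger, opt_time_delta=24,
    )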
@@ -66,40 +75,55 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict, self.retrieve_hass_conf = retrieve_hass_conf self.optim_conf = optim_conf self.plant_conf = plant_conf - self.freq = self.retrieve_hass_conf['optimization_time_step'] - self.time_zone = self.retrieve_hass_conf['time_zone'] - self.timeStep = self.freq.seconds/3600 # in hours - self.time_delta = pd.to_timedelta(opt_time_delta, "hours") # The period of optimization - self.var_PV = self.retrieve_hass_conf['sensor_power_photovoltaics'] - self.var_load = self.retrieve_hass_conf['sensor_power_load_no_var_loads'] - self.var_load_new = self.var_load+'_positive' + self.freq = self.retrieve_hass_conf["optimization_time_step"] + self.time_zone = self.retrieve_hass_conf["time_zone"] + self.timeStep = self.freq.seconds / 3600 # in hours + self.time_delta = pd.to_timedelta( + opt_time_delta, "hours" + ) # The period of optimization + self.var_PV = self.retrieve_hass_conf["sensor_power_photovoltaics"] + self.var_load = self.retrieve_hass_conf["sensor_power_load_no_var_loads"] + self.var_load_new = self.var_load + "_positive" self.costfun = costfun # self.emhass_conf = emhass_conf self.logger = logger self.var_load_cost = var_load_cost self.var_prod_price = var_prod_price self.optim_status = None - if 'lp_solver' in optim_conf.keys(): - self.lp_solver = optim_conf['lp_solver'] + if "lp_solver" in optim_conf.keys(): + self.lp_solver = optim_conf["lp_solver"] else: - self.lp_solver = 'default' - if 'lp_solver_path' in optim_conf.keys(): - self.lp_solver_path = optim_conf['lp_solver_path'] + self.lp_solver = "default" + if "lp_solver_path" in optim_conf.keys(): + self.lp_solver_path = optim_conf["lp_solver_path"] else: - self.lp_solver_path = 'empty' - if self.lp_solver != 'COIN_CMD' and self.lp_solver_path != 'empty': - self.logger.error("Use COIN_CMD solver name if you want to set a path for the LP solver") - if self.lp_solver == 'COIN_CMD' and self.lp_solver_path == 'empty': #if COIN_CMD but lp_solver_path is empty - self.logger.warning("lp_solver=COIN_CMD but lp_solver_path=empty, attempting to use lp_solver_path=/usr/bin/cbc") - self.lp_solver_path = '/usr/bin/cbc' - - def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: np.array, - unit_load_cost: np.array, unit_prod_price: np.array, - soc_init: Optional[float] = None, soc_final: Optional[float] = None, - def_total_hours: Optional[list] = None, - def_start_timestep: Optional[list] = None, - def_end_timestep: Optional[list] = None, - debug: Optional[bool] = False) -> pd.DataFrame: + self.lp_solver_path = "empty" + if self.lp_solver != "COIN_CMD" and self.lp_solver_path != "empty": + self.logger.error( + "Use COIN_CMD solver name if you want to set a path for the LP solver" + ) + if ( + self.lp_solver == "COIN_CMD" and self.lp_solver_path == "empty" + ): # if COIN_CMD but lp_solver_path is empty + self.logger.warning( + "lp_solver=COIN_CMD but lp_solver_path=empty, attempting to use lp_solver_path=/usr/bin/cbc" + ) + self.lp_solver_path = "/usr/bin/cbc" + + def perform_optimization( + self, + data_opt: pd.DataFrame, + P_PV: np.array, + P_load: np.array, + unit_load_cost: np.array, + unit_prod_price: np.array, + soc_init: Optional[float] = None, + soc_final: Optional[float] = None, + def_total_hours: Optional[list] = None, + def_start_timestep: Optional[list] = None, + def_end_timestep: Optional[list] = None, + debug: Optional[bool] = False, + ) -> pd.DataFrame: r""" Perform the actual optimization using linear programming (LP). 
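The hunks that follow build the LP decision variables and the cost-function objective with PuLP. A self-contained toy version of that pattern, with two time steps, made-up prices and one artificial balance constraint (nothing here comes from the real plant configuration):

    import pulp as plp

    set_I = range(2)               # two optimization time steps
    timeStep = 0.5                 # hours per step
    unit_load_cost = [0.25, 0.35]  # made-up purchase prices
    unit_prod_price = [0.10, 0.10] # made-up injection prices
    opt_model = plp.LpProblem("toy_profit", plp.LpMaximize)
    P_grid_pos = {i: plp.LpVariable(f"P_grid_pos{i}", lowBound=0, upBound=9000) for i in set_I}
    P_grid_neg = {i: plp.LpVariable(f"P_grid_neg{i}", lowBound=-9000, upBound=0) for i in set_I}
    # Same shape as the 'profit' objective without total PV sell: pay for imports,
    # get paid for exports (P_grid_neg is negative by construction).
    opt_model.setObjective(plp.lpSum(
        -0.001 * timeStep * (unit_load_cost[i] * P_grid_pos[i] + unit_prod_price[i] * P_grid_neg[i])
        for i in set_I
    ))
    for i in set_I:
        # Toy stand-in for the power balance constraint: net grid exchange covers a 500 W load.
        opt_model += P_grid_pos[i] + P_grid_neg[i] == 500, f"balance_{i}"
    opt_model.solve(plp.PULP_CBC_CMD(msg=0))
    print(plp.LpStatus[opt_model.status], [P_grid_pos[i].varValue for i in set_I])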
@@ -138,24 +162,26 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n """ # Prepare some data in the case of a battery - if self.optim_conf['set_use_battery']: + if self.optim_conf["set_use_battery"]: if soc_init is None: if soc_final is not None: soc_init = soc_final else: - soc_init = self.plant_conf['battery_target_state_of_charge'] + soc_init = self.plant_conf["battery_target_state_of_charge"] if soc_final is None: if soc_init is not None: soc_final = soc_init else: - soc_final = self.plant_conf['battery_target_state_of_charge'] + soc_final = self.plant_conf["battery_target_state_of_charge"] if def_total_hours is None: - def_total_hours = self.optim_conf['operating_hours_of_each_deferrable_load'] + def_total_hours = self.optim_conf["operating_hours_of_each_deferrable_load"] if def_start_timestep is None: - def_start_timestep = self.optim_conf['start_timesteps_of_each_deferrable_load'] + def_start_timestep = self.optim_conf[ + "start_timesteps_of_each_deferrable_load" + ] if def_end_timestep is None: - def_end_timestep = self.optim_conf['end_timesteps_of_each_deferrable_load'] - type_self_conso = 'bigm' # maxmin + def_end_timestep = self.optim_conf["end_timesteps_of_each_deferrable_load"] + type_self_conso = "bigm" # maxmin #### The LP problem using Pulp #### opt_model = plp.LpProblem("LP_Model", plp.LpMaximize) @@ -165,398 +191,698 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n M = 10e10 ## Add decision variables - P_grid_neg = {(i):plp.LpVariable(cat='Continuous', - lowBound=-self.plant_conf['maximum_power_to_grid'], upBound=0, - name="P_grid_neg{}".format(i)) for i in set_I} - P_grid_pos = {(i):plp.LpVariable(cat='Continuous', - lowBound=0, upBound=self.plant_conf['maximum_power_from_grid'], - name="P_grid_pos{}".format(i)) for i in set_I} + P_grid_neg = { + (i): plp.LpVariable( + cat="Continuous", + lowBound=-self.plant_conf["maximum_power_to_grid"], + upBound=0, + name="P_grid_neg{}".format(i), + ) + for i in set_I + } + P_grid_pos = { + (i): plp.LpVariable( + cat="Continuous", + lowBound=0, + upBound=self.plant_conf["maximum_power_from_grid"], + name="P_grid_pos{}".format(i), + ) + for i in set_I + } P_deferrable = [] P_def_bin1 = [] - for k in range(self.optim_conf['number_of_deferrable_loads']): - if type(self.optim_conf['nominal_power_of_deferrable_loads'][k]) == list: - upBound = np.max(self.optim_conf['nominal_power_of_deferrable_loads'][k]) + for k in range(self.optim_conf["number_of_deferrable_loads"]): + if type(self.optim_conf["nominal_power_of_deferrable_loads"][k]) == list: + upBound = np.max( + self.optim_conf["nominal_power_of_deferrable_loads"][k] + ) else: - upBound = self.optim_conf['nominal_power_of_deferrable_loads'][k] - if self.optim_conf['treat_deferrable_load_as_semi_cont'][k]: - P_deferrable.append({(i):plp.LpVariable(cat='Continuous', - name="P_deferrable{}_{}".format(k, i)) for i in set_I}) + upBound = self.optim_conf["nominal_power_of_deferrable_loads"][k] + if self.optim_conf["treat_deferrable_load_as_semi_cont"][k]: + P_deferrable.append( + { + (i): plp.LpVariable( + cat="Continuous", name="P_deferrable{}_{}".format(k, i) + ) + for i in set_I + } + ) else: - P_deferrable.append({(i):plp.LpVariable(cat='Continuous', - lowBound=0, upBound=upBound, - name="P_deferrable{}_{}".format(k, i)) for i in set_I}) - P_def_bin1.append({(i):plp.LpVariable(cat='Binary', - name="P_def{}_bin1_{}".format(k, i)) for i in set_I}) + P_deferrable.append( + { + (i): plp.LpVariable( + cat="Continuous", + 
lowBound=0, + upBound=upBound, + name="P_deferrable{}_{}".format(k, i), + ) + for i in set_I + } + ) + P_def_bin1.append( + { + (i): plp.LpVariable( + cat="Binary", name="P_def{}_bin1_{}".format(k, i) + ) + for i in set_I + } + ) P_def_start = [] P_def_bin2 = [] - for k in range(self.optim_conf['number_of_deferrable_loads']): - P_def_start.append({(i):plp.LpVariable(cat='Binary', - name="P_def{}_start_{}".format(k, i)) for i in set_I}) - P_def_bin2.append({(i):plp.LpVariable(cat='Binary', - name="P_def{}_bin2_{}".format(k, i)) for i in set_I}) - D = {(i):plp.LpVariable(cat='Binary', - name="D_{}".format(i)) for i in set_I} - E = {(i):plp.LpVariable(cat='Binary', - name="E_{}".format(i)) for i in set_I} - if self.optim_conf['set_use_battery']: - P_sto_pos = {(i):plp.LpVariable(cat='Continuous', - lowBound=0, upBound=self.plant_conf['battery_discharge_power_max'], - name="P_sto_pos_{0}".format(i)) for i in set_I} - P_sto_neg = {(i):plp.LpVariable(cat='Continuous', - lowBound=-self.plant_conf['battery_charge_power_max'], upBound=0, - name="P_sto_neg_{0}".format(i)) for i in set_I} + for k in range(self.optim_conf["number_of_deferrable_loads"]): + P_def_start.append( + { + (i): plp.LpVariable( + cat="Binary", name="P_def{}_start_{}".format(k, i) + ) + for i in set_I + } + ) + P_def_bin2.append( + { + (i): plp.LpVariable( + cat="Binary", name="P_def{}_bin2_{}".format(k, i) + ) + for i in set_I + } + ) + D = {(i): plp.LpVariable(cat="Binary", name="D_{}".format(i)) for i in set_I} + E = {(i): plp.LpVariable(cat="Binary", name="E_{}".format(i)) for i in set_I} + if self.optim_conf["set_use_battery"]: + P_sto_pos = { + (i): plp.LpVariable( + cat="Continuous", + lowBound=0, + upBound=self.plant_conf["battery_discharge_power_max"], + name="P_sto_pos_{0}".format(i), + ) + for i in set_I + } + P_sto_neg = { + (i): plp.LpVariable( + cat="Continuous", + lowBound=-self.plant_conf["battery_charge_power_max"], + upBound=0, + name="P_sto_neg_{0}".format(i), + ) + for i in set_I + } else: - P_sto_pos = {(i):i*0 for i in set_I} - P_sto_neg = {(i):i*0 for i in set_I} - - if self.costfun == 'self-consumption': - SC = {(i):plp.LpVariable(cat='Continuous', - name="SC_{}".format(i)) for i in set_I} - if self.plant_conf['inverter_is_hybrid']: - P_hybrid_inverter = {(i):plp.LpVariable(cat='Continuous', - name="P_hybrid_inverter{}".format(i)) for i in set_I} - P_PV_curtailment = {(i):plp.LpVariable(cat='Continuous', lowBound=0, - name="P_PV_curtailment{}".format(i)) for i in set_I} - + P_sto_pos = {(i): i * 0 for i in set_I} + P_sto_neg = {(i): i * 0 for i in set_I} + + if self.costfun == "self-consumption": + SC = { + (i): plp.LpVariable(cat="Continuous", name="SC_{}".format(i)) + for i in set_I + } + if self.plant_conf["inverter_is_hybrid"]: + P_hybrid_inverter = { + (i): plp.LpVariable( + cat="Continuous", name="P_hybrid_inverter{}".format(i) + ) + for i in set_I + } + P_PV_curtailment = { + (i): plp.LpVariable( + cat="Continuous", lowBound=0, name="P_PV_curtailment{}".format(i) + ) + for i in set_I + } + ## Define objective - P_def_sum= [] + P_def_sum = [] for i in set_I: - P_def_sum.append(plp.lpSum(P_deferrable[k][i] for k in range(self.optim_conf['number_of_deferrable_loads']))) - if self.costfun == 'profit': - if self.optim_conf['set_total_pv_sell']: - objective = plp.lpSum(-0.001*self.timeStep*(unit_load_cost[i]*(P_load[i] + P_def_sum[i]) + \ - unit_prod_price[i]*P_grid_neg[i]) for i in set_I) + P_def_sum.append( + plp.lpSum( + P_deferrable[k][i] + for k in 
range(self.optim_conf["number_of_deferrable_loads"]) + ) + ) + if self.costfun == "profit": + if self.optim_conf["set_total_pv_sell"]: + objective = plp.lpSum( + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * (P_load[i] + P_def_sum[i]) + + unit_prod_price[i] * P_grid_neg[i] + ) + for i in set_I + ) else: - objective = plp.lpSum(-0.001*self.timeStep*(unit_load_cost[i]*P_grid_pos[i] + \ - unit_prod_price[i]*P_grid_neg[i]) for i in set_I) - elif self.costfun == 'cost': - if self.optim_conf['set_total_pv_sell']: - objective = plp.lpSum(-0.001*self.timeStep*unit_load_cost[i]*(P_load[i] + P_def_sum[i]) for i in set_I) + objective = plp.lpSum( + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * P_grid_pos[i] + + unit_prod_price[i] * P_grid_neg[i] + ) + for i in set_I + ) + elif self.costfun == "cost": + if self.optim_conf["set_total_pv_sell"]: + objective = plp.lpSum( + -0.001 + * self.timeStep + * unit_load_cost[i] + * (P_load[i] + P_def_sum[i]) + for i in set_I + ) else: - objective = plp.lpSum(-0.001*self.timeStep*unit_load_cost[i]*P_grid_pos[i] for i in set_I) - elif self.costfun == 'self-consumption': - if type_self_conso == 'bigm': + objective = plp.lpSum( + -0.001 * self.timeStep * unit_load_cost[i] * P_grid_pos[i] + for i in set_I + ) + elif self.costfun == "self-consumption": + if type_self_conso == "bigm": bigm = 1e3 - objective = plp.lpSum(-0.001*self.timeStep*(bigm*unit_load_cost[i]*P_grid_pos[i] + \ - unit_prod_price[i]*P_grid_neg[i]) for i in set_I) - elif type_self_conso == 'maxmin': - objective = plp.lpSum(0.001*self.timeStep*unit_load_cost[i]*SC[i] for i in set_I) + objective = plp.lpSum( + -0.001 + * self.timeStep + * ( + bigm * unit_load_cost[i] * P_grid_pos[i] + + unit_prod_price[i] * P_grid_neg[i] + ) + for i in set_I + ) + elif type_self_conso == "maxmin": + objective = plp.lpSum( + 0.001 * self.timeStep * unit_load_cost[i] * SC[i] for i in set_I + ) else: self.logger.error("Not a valid option for type_self_conso parameter") else: self.logger.error("The cost function specified type is not valid") # Add more terms to the objective function in the case of battery use - if self.optim_conf['set_use_battery']: - objective = objective + plp.lpSum(-0.001*self.timeStep*( - self.optim_conf['weight_battery_discharge']*P_sto_pos[i] + \ - self.optim_conf['weight_battery_charge']*P_sto_neg[i]) for i in set_I) + if self.optim_conf["set_use_battery"]: + objective = objective + plp.lpSum( + -0.001 + * self.timeStep + * ( + self.optim_conf["weight_battery_discharge"] * P_sto_pos[i] + + self.optim_conf["weight_battery_charge"] * P_sto_neg[i] + ) + for i in set_I + ) # Add term penalizing each startup where configured - if ('set_deferrable_startup_penalty' in self.optim_conf and self.optim_conf['set_deferrable_startup_penalty']): - for k in range(self.optim_conf['number_of_deferrable_loads']): - if (len(self.optim_conf['set_deferrable_startup_penalty']) > k and self.optim_conf['set_deferrable_startup_penalty'][k]): + if ( + "set_deferrable_startup_penalty" in self.optim_conf + and self.optim_conf["set_deferrable_startup_penalty"] + ): + for k in range(self.optim_conf["number_of_deferrable_loads"]): + if ( + len(self.optim_conf["set_deferrable_startup_penalty"]) > k + and self.optim_conf["set_deferrable_startup_penalty"][k] + ): objective = objective + plp.lpSum( - -0.001 * self.timeStep * self.optim_conf['set_deferrable_startup_penalty'][k] * P_def_start[k][i] *\ - unit_load_cost[i] * self.optim_conf['nominal_power_of_deferrable_loads'][k] - for i in set_I) + -0.001 + * self.timeStep 
+ * self.optim_conf["set_deferrable_startup_penalty"][k] + * P_def_start[k][i] + * unit_load_cost[i] + * self.optim_conf["nominal_power_of_deferrable_loads"][k] + for i in set_I + ) opt_model.setObjective(objective) ## Setting constraints # The main constraint: power balance - if self.plant_conf['inverter_is_hybrid']: - constraints = {"constraint_main1_{}".format(i) : - plp.LpConstraint( - e = P_hybrid_inverter[i] - P_def_sum[i] - P_load[i] + P_grid_neg[i] + P_grid_pos[i] , - sense = plp.LpConstraintEQ, - rhs = 0) - for i in set_I} + if self.plant_conf["inverter_is_hybrid"]: + constraints = { + "constraint_main1_{}".format(i): plp.LpConstraint( + e=P_hybrid_inverter[i] + - P_def_sum[i] + - P_load[i] + + P_grid_neg[i] + + P_grid_pos[i], + sense=plp.LpConstraintEQ, + rhs=0, + ) + for i in set_I + } else: - if self.plant_conf['compute_curtailment']: - constraints = {"constraint_main2_{}".format(i) : - plp.LpConstraint( - e = P_PV[i] - P_PV_curtailment[i] - P_def_sum[i] - P_load[i] + P_grid_neg[i] + P_grid_pos[i] + P_sto_pos[i] + P_sto_neg[i], - sense = plp.LpConstraintEQ, - rhs = 0) - for i in set_I} + if self.plant_conf["compute_curtailment"]: + constraints = { + "constraint_main2_{}".format(i): plp.LpConstraint( + e=P_PV[i] + - P_PV_curtailment[i] + - P_def_sum[i] + - P_load[i] + + P_grid_neg[i] + + P_grid_pos[i] + + P_sto_pos[i] + + P_sto_neg[i], + sense=plp.LpConstraintEQ, + rhs=0, + ) + for i in set_I + } else: - constraints = {"constraint_main3_{}".format(i) : - plp.LpConstraint( - e = P_PV[i] - P_def_sum[i] - P_load[i] + P_grid_neg[i] + P_grid_pos[i] + P_sto_pos[i] + P_sto_neg[i], - sense = plp.LpConstraintEQ, - rhs = 0) - for i in set_I} - + constraints = { + "constraint_main3_{}".format(i): plp.LpConstraint( + e=P_PV[i] + - P_def_sum[i] + - P_load[i] + + P_grid_neg[i] + + P_grid_pos[i] + + P_sto_pos[i] + + P_sto_neg[i], + sense=plp.LpConstraintEQ, + rhs=0, + ) + for i in set_I + } + # Constraint for hybrid inverter and curtailment cases - if type(self.plant_conf['pv_module_model']) == list: + if type(self.plant_conf["pv_module_model"]) == list: P_nom_inverter = 0.0 - for i in range(len(self.plant_conf['pv_inverter_model'])): - if type(self.plant_conf['pv_inverter_model'][i]) == str: - cec_inverters = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_inverters.pbz2', "rb") + for i in range(len(self.plant_conf["pv_inverter_model"])): + if type(self.plant_conf["pv_inverter_model"][i]) == str: + cec_inverters = bz2.BZ2File( + pathlib.Path(__file__).parent / "data/cec_inverters.pbz2", "rb" + ) cec_inverters = cPickle.load(cec_inverters) - inverter = cec_inverters[self.plant_conf['pv_inverter_model'][i]] + inverter = cec_inverters[self.plant_conf["pv_inverter_model"][i]] P_nom_inverter += inverter.Paco else: - P_nom_inverter += self.plant_conf['pv_inverter_model'][i] + P_nom_inverter += self.plant_conf["pv_inverter_model"][i] else: - if type(self.plant_conf['pv_inverter_model'][i]) == str: - cec_inverters = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_inverters.pbz2', "rb") + if type(self.plant_conf["pv_inverter_model"][i]) == str: + cec_inverters = bz2.BZ2File( + pathlib.Path(__file__).parent / "data/cec_inverters.pbz2", "rb" + ) cec_inverters = cPickle.load(cec_inverters) - inverter = cec_inverters[self.plant_conf['pv_inverter_model']] + inverter = cec_inverters[self.plant_conf["pv_inverter_model"]] P_nom_inverter = inverter.Paco else: - P_nom_inverter = self.plant_conf['pv_inverter_model'] - if self.plant_conf['inverter_is_hybrid']: - 
constraints.update({"constraint_hybrid_inverter1_{}".format(i) : - plp.LpConstraint( - e = P_PV[i] - P_PV_curtailment[i] + P_sto_pos[i] + P_sto_neg[i] - P_nom_inverter, - sense = plp.LpConstraintLE, - rhs = 0) - for i in set_I}) - constraints.update({"constraint_hybrid_inverter2_{}".format(i) : - plp.LpConstraint( - e = P_PV[i] - P_PV_curtailment[i] + P_sto_pos[i] + P_sto_neg[i] - P_hybrid_inverter[i], - sense = plp.LpConstraintEQ, - rhs = 0) - for i in set_I}) + P_nom_inverter = self.plant_conf["pv_inverter_model"] + if self.plant_conf["inverter_is_hybrid"]: + constraints.update( + { + "constraint_hybrid_inverter1_{}".format(i): plp.LpConstraint( + e=P_PV[i] + - P_PV_curtailment[i] + + P_sto_pos[i] + + P_sto_neg[i] + - P_nom_inverter, + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_hybrid_inverter2_{}".format(i): plp.LpConstraint( + e=P_PV[i] + - P_PV_curtailment[i] + + P_sto_pos[i] + + P_sto_neg[i] + - P_hybrid_inverter[i], + sense=plp.LpConstraintEQ, + rhs=0, + ) + for i in set_I + } + ) else: - if self.plant_conf['compute_curtailment']: - constraints.update({"constraint_curtailment_{}".format(i) : - plp.LpConstraint( - e = P_PV_curtailment[i] - max(P_PV[i],0), - sense = plp.LpConstraintLE, - rhs = 0) - for i in set_I}) + if self.plant_conf["compute_curtailment"]: + constraints.update( + { + "constraint_curtailment_{}".format(i): plp.LpConstraint( + e=P_PV_curtailment[i] - max(P_PV[i], 0), + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) # Two special constraints just for a self-consumption cost function - if self.costfun == 'self-consumption': - if type_self_conso == 'maxmin': # maxmin linear problem - constraints.update({"constraint_selfcons_PV1_{}".format(i) : - plp.LpConstraint( - e = SC[i] - P_PV[i], - sense = plp.LpConstraintLE, - rhs = 0) - for i in set_I}) - constraints.update({"constraint_selfcons_PV2_{}".format(i) : - plp.LpConstraint( - e = SC[i] - P_load[i] - P_def_sum[i], - sense = plp.LpConstraintLE, - rhs = 0) - for i in set_I}) + if self.costfun == "self-consumption": + if type_self_conso == "maxmin": # maxmin linear problem + constraints.update( + { + "constraint_selfcons_PV1_{}".format(i): plp.LpConstraint( + e=SC[i] - P_PV[i], sense=plp.LpConstraintLE, rhs=0 + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_selfcons_PV2_{}".format(i): plp.LpConstraint( + e=SC[i] - P_load[i] - P_def_sum[i], + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) # Avoid injecting and consuming from grid at the same time - constraints.update({"constraint_pgridpos_{}".format(i) : - plp.LpConstraint( - e = P_grid_pos[i] - self.plant_conf['maximum_power_from_grid']*D[i], - sense = plp.LpConstraintLE, - rhs = 0) - for i in set_I}) - constraints.update({"constraint_pgridneg_{}".format(i) : - plp.LpConstraint( - e = -P_grid_neg[i] - self.plant_conf['maximum_power_to_grid']*(1-D[i]), - sense = plp.LpConstraintLE, - rhs = 0) - for i in set_I}) + constraints.update( + { + "constraint_pgridpos_{}".format(i): plp.LpConstraint( + e=P_grid_pos[i] - self.plant_conf["maximum_power_from_grid"] * D[i], + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_pgridneg_{}".format(i): plp.LpConstraint( + e=-P_grid_neg[i] + - self.plant_conf["maximum_power_to_grid"] * (1 - D[i]), + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) # Treat deferrable loads constraints predicted_temps = {} - for k in 
range(self.optim_conf['number_of_deferrable_loads']): - - if type(self.optim_conf['nominal_power_of_deferrable_loads'][k]) == list: + for k in range(self.optim_conf["number_of_deferrable_loads"]): + if type(self.optim_conf["nominal_power_of_deferrable_loads"][k]) == list: # Constraint for sequence of deferrable # WARNING: This is experimental, formulation seems correct but feasibility problems. # Probably uncomptabile with other constraints - power_sequence = self.optim_conf['nominal_power_of_deferrable_loads'][k] + power_sequence = self.optim_conf["nominal_power_of_deferrable_loads"][k] sequence_length = len(power_sequence) + def create_matrix(input_list, n): matrix = [] for i in range(n + 1): row = [0] * i + input_list + [0] * (n - i) - matrix.append(row[:n*2]) + matrix.append(row[: n * 2]) return matrix - matrix = create_matrix(power_sequence, n-sequence_length) - y = plp.LpVariable.dicts(f"y{k}", (i for i in range(len(matrix))), cat='Binary') - constraints.update({f"single_value_constraint_{k}" : - plp.LpConstraint( - e = plp.lpSum(y[i] for i in range(len(matrix))) - 1, - sense = plp.LpConstraintEQ, - rhs = 0) - }) - constraints.update({f"pdef{k}_sumconstraint_{i}" : - plp.LpConstraint( - e = plp.lpSum(P_deferrable[k][i] for i in set_I) - np.sum(power_sequence), - sense = plp.LpConstraintEQ, - rhs = 0) - }) - constraints.update({f"pdef{k}_positive_constraint_{i}" : - plp.LpConstraint( - e = P_deferrable[k][i], - sense = plp.LpConstraintGE, - rhs = 0) - for i in set_I}) + + matrix = create_matrix(power_sequence, n - sequence_length) + y = plp.LpVariable.dicts( + f"y{k}", (i for i in range(len(matrix))), cat="Binary" + ) + constraints.update( + { + f"single_value_constraint_{k}": plp.LpConstraint( + e=plp.lpSum(y[i] for i in range(len(matrix))) - 1, + sense=plp.LpConstraintEQ, + rhs=0, + ) + } + ) + constraints.update( + { + f"pdef{k}_sumconstraint_{i}": plp.LpConstraint( + e=plp.lpSum(P_deferrable[k][i] for i in set_I) + - np.sum(power_sequence), + sense=plp.LpConstraintEQ, + rhs=0, + ) + } + ) + constraints.update( + { + f"pdef{k}_positive_constraint_{i}": plp.LpConstraint( + e=P_deferrable[k][i], sense=plp.LpConstraintGE, rhs=0 + ) + for i in set_I + } + ) for num, mat in enumerate(matrix): - constraints.update({f"pdef{k}_value_constraint_{num}_{i}" : - plp.LpConstraint( - e = P_deferrable[k][i] - mat[i]*y[num], - sense = plp.LpConstraintEQ, - rhs = 0) - for i in set_I}) - + constraints.update( + { + f"pdef{k}_value_constraint_{num}_{i}": plp.LpConstraint( + e=P_deferrable[k][i] - mat[i] * y[num], + sense=plp.LpConstraintEQ, + rhs=0, + ) + for i in set_I + } + ) + elif "def_load_config" in self.optim_conf.keys(): if "thermal_config" in self.optim_conf["def_load_config"][k]: # Special case of a thermal deferrable load - def_load_config = self.optim_conf['def_load_config'][k] - if def_load_config and 'thermal_config' in def_load_config: + def_load_config = self.optim_conf["def_load_config"][k] + if def_load_config and "thermal_config" in def_load_config: hc = def_load_config["thermal_config"] start_temperature = hc["start_temperature"] cooling_constant = hc["cooling_constant"] heating_rate = hc["heating_rate"] overshoot_temperature = hc["overshoot_temperature"] - outdoor_temperature_forecast = data_opt['outdoor_temperature_forecast'] + outdoor_temperature_forecast = data_opt[ + "outdoor_temperature_forecast" + ] desired_temperatures = hc["desired_temperatures"] - sense = hc.get('sense', 'heat') + sense = hc.get("sense", "heat") predicted_temp = [start_temperature] for I in set_I: if I 
== 0: continue predicted_temp.append( - predicted_temp[I-1] - + (P_deferrable[k][I-1] * (heating_rate * self.timeStep / self.optim_conf['nominal_power_of_deferrable_loads'][k])) - - (cooling_constant * (predicted_temp[I-1] - outdoor_temperature_forecast[I-1]))) - if len(desired_temperatures) > I and desired_temperatures[I]: - constraints.update({"constraint_defload{}_temperature_{}".format(k, I): - plp.LpConstraint( - e = predicted_temp[I], - sense = plp.LpConstraintGE if sense == 'heat' else plp.LpConstraintLE, - rhs = desired_temperatures[I], + predicted_temp[I - 1] + + ( + P_deferrable[k][I - 1] + * ( + heating_rate + * self.timeStep + / self.optim_conf[ + "nominal_power_of_deferrable_loads" + ][k] + ) + ) + - ( + cooling_constant + * ( + predicted_temp[I - 1] + - outdoor_temperature_forecast[I - 1] ) - }) - constraints.update({"constraint_defload{}_overshoot_temp_{}".format(k, I): - plp.LpConstraint( - e = predicted_temp[I], - sense = plp.LpConstraintLE if sense == 'heat' else plp.LpConstraintGE, - rhs = overshoot_temperature, + ) ) - for I in set_I}) + if ( + len(desired_temperatures) > I + and desired_temperatures[I] + ): + constraints.update( + { + "constraint_defload{}_temperature_{}".format( + k, I + ): plp.LpConstraint( + e=predicted_temp[I], + sense=plp.LpConstraintGE + if sense == "heat" + else plp.LpConstraintLE, + rhs=desired_temperatures[I], + ) + } + ) + constraints.update( + { + "constraint_defload{}_overshoot_temp_{}".format( + k, I + ): plp.LpConstraint( + e=predicted_temp[I], + sense=plp.LpConstraintLE + if sense == "heat" + else plp.LpConstraintGE, + rhs=overshoot_temperature, + ) + for I in set_I + } + ) predicted_temps[k] = predicted_temp - + else: - if def_total_hours[k] > 0: # Total time of deferrable load - constraints.update({"constraint_defload{}_energy".format(k) : - plp.LpConstraint( - e = plp.lpSum(P_deferrable[k][i]*self.timeStep for i in set_I), - sense = plp.LpConstraintEQ, - rhs = def_total_hours[k]*self.optim_conf['nominal_power_of_deferrable_loads'][k]) - }) - + constraints.update( + { + "constraint_defload{}_energy".format(k): plp.LpConstraint( + e=plp.lpSum( + P_deferrable[k][i] * self.timeStep for i in set_I + ), + sense=plp.LpConstraintEQ, + rhs=def_total_hours[k] + * self.optim_conf["nominal_power_of_deferrable_loads"][ + k + ], + ) + } + ) + # Ensure deferrable loads consume energy between def_start_timestep & def_end_timestep - self.logger.debug("Deferrable load {}: Proposed optimization window: {} --> {}".format( - k, def_start_timestep[k], def_end_timestep[k])) + self.logger.debug( + "Deferrable load {}: Proposed optimization window: {} --> {}".format( + k, def_start_timestep[k], def_end_timestep[k] + ) + ) def_start, def_end, warning = Optimization.validate_def_timewindow( - def_start_timestep[k], def_end_timestep[k], ceil(def_total_hours[k]/self.timeStep), n) - if warning is not None: + def_start_timestep[k], + def_end_timestep[k], + ceil(def_total_hours[k] / self.timeStep), + n, + ) + if warning is not None: self.logger.warning("Deferrable load {} : {}".format(k, warning)) - self.logger.debug("Deferrable load {}: Validated optimization window: {} --> {}".format( - k, def_start, def_end)) - if def_start > 0: - constraints.update({"constraint_defload{}_start_timestep".format(k) : - plp.LpConstraint( - e = plp.lpSum(P_deferrable[k][i]*self.timeStep for i in range(0, def_start)), - sense = plp.LpConstraintEQ, - rhs = 0) - }) - if def_end > 0: - constraints.update({"constraint_defload{}_end_timestep".format(k) : - plp.LpConstraint( - e = 
plp.lpSum(P_deferrable[k][i]*self.timeStep for i in range(def_end, n)), - sense = plp.LpConstraintEQ, - rhs = 0) - }) - + self.logger.debug( + "Deferrable load {}: Validated optimization window: {} --> {}".format( + k, def_start, def_end + ) + ) + if def_start > 0: + constraints.update( + { + "constraint_defload{}_start_timestep".format( + k + ): plp.LpConstraint( + e=plp.lpSum( + P_deferrable[k][i] * self.timeStep + for i in range(0, def_start) + ), + sense=plp.LpConstraintEQ, + rhs=0, + ) + } + ) + if def_end > 0: + constraints.update( + { + "constraint_defload{}_end_timestep".format(k): plp.LpConstraint( + e=plp.lpSum( + P_deferrable[k][i] * self.timeStep + for i in range(def_end, n) + ), + sense=plp.LpConstraintEQ, + rhs=0, + ) + } + ) + # Treat the number of starts for a deferrable load (new method considering current state) current_state = 0 - if ("def_current_state" in self.optim_conf and len(self.optim_conf["def_current_state"]) > k): + if ( + "def_current_state" in self.optim_conf + and len(self.optim_conf["def_current_state"]) > k + ): current_state = 1 if self.optim_conf["def_current_state"][k] else 0 # P_deferrable < P_def_bin2 * 1 million # P_deferrable must be zero if P_def_bin2 is zero - constraints.update({"constraint_pdef{}_start1_{}".format(k, i): - plp.LpConstraint( - e=P_deferrable[k][i] - P_def_bin2[k][i] * M, - sense=plp.LpConstraintLE, - rhs=0) - for i in set_I}) + constraints.update( + { + "constraint_pdef{}_start1_{}".format(k, i): plp.LpConstraint( + e=P_deferrable[k][i] - P_def_bin2[k][i] * M, + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) # P_deferrable - P_def_bin2 <= 0 # P_def_bin2 must be zero if P_deferrable is zero - constraints.update({"constraint_pdef{}_start1a_{}".format(k, i): - plp.LpConstraint( - e=P_def_bin2[k][i] - P_deferrable[k][i], - sense=plp.LpConstraintLE, - rhs=0) - for i in set_I}) + constraints.update( + { + "constraint_pdef{}_start1a_{}".format(k, i): plp.LpConstraint( + e=P_def_bin2[k][i] - P_deferrable[k][i], + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) # P_def_start + P_def_bin2[i-1] >= P_def_bin2[i] # If load is on this cycle (P_def_bin2[i] is 1) then P_def_start must be 1 OR P_def_bin2[i-1] must be 1 # For first timestep, use current state if provided by caller. 
- constraints.update({"constraint_pdef{}_start2_{}".format(k, i): - plp.LpConstraint( - e=P_def_start[k][i] - - P_def_bin2[k][i] - + (P_def_bin2[k][i - 1] if i - 1 >= 0 else current_state), - sense=plp.LpConstraintGE, - rhs=0) - for i in set_I}) + constraints.update( + { + "constraint_pdef{}_start2_{}".format(k, i): plp.LpConstraint( + e=P_def_start[k][i] + - P_def_bin2[k][i] + + (P_def_bin2[k][i - 1] if i - 1 >= 0 else current_state), + sense=plp.LpConstraintGE, + rhs=0, + ) + for i in set_I + } + ) # P_def_bin2[i-1] + P_def_start <= 1 # If load started this cycle (P_def_start[i] is 1) then P_def_bin2[i-1] must be 0 - constraints.update({"constraint_pdef{}_start3_{}".format(k, i): - plp.LpConstraint( - e=(P_def_bin2[k][i-1] if i-1 >= 0 else 0) + P_def_start[k][i], - sense=plp.LpConstraintLE, - rhs=1) - for i in set_I}) - + constraints.update( + { + "constraint_pdef{}_start3_{}".format(k, i): plp.LpConstraint( + e=(P_def_bin2[k][i - 1] if i - 1 >= 0 else 0) + + P_def_start[k][i], + sense=plp.LpConstraintLE, + rhs=1, + ) + for i in set_I + } + ) + # Treat deferrable as a fixed value variable with just one startup - if self.optim_conf['set_deferrable_load_single_constant'][k]: + if self.optim_conf["set_deferrable_load_single_constant"][k]: # P_def_start[i] must be 1 for exactly 1 value of i - constraints.update({"constraint_pdef{}_start4".format(k) : - plp.LpConstraint( - e = plp.lpSum(P_def_start[k][i] for i in set_I), - sense = plp.LpConstraintEQ, - rhs = 1) - }) + constraints.update( + { + "constraint_pdef{}_start4".format(k): plp.LpConstraint( + e=plp.lpSum(P_def_start[k][i] for i in set_I), + sense=plp.LpConstraintEQ, + rhs=1, + ) + } + ) # P_def_bin2 must be 1 for exactly the correct number of timesteps. - constraints.update({"constraint_pdef{}_start5".format(k) : - plp.LpConstraint( - e = plp.lpSum(P_def_bin2[k][i] for i in set_I), - sense = plp.LpConstraintEQ, - rhs = def_total_hours[k]/self.timeStep) - }) - + constraints.update( + { + "constraint_pdef{}_start5".format(k): plp.LpConstraint( + e=plp.lpSum(P_def_bin2[k][i] for i in set_I), + sense=plp.LpConstraintEQ, + rhs=def_total_hours[k] / self.timeStep, + ) + } + ) + # Treat deferrable load as a semi-continuous variable - if self.optim_conf['treat_deferrable_load_as_semi_cont'][k]: - constraints.update({"constraint_pdef{}_semicont1_{}".format(k, i) : - plp.LpConstraint( - e=P_deferrable[k][i] - self.optim_conf['nominal_power_of_deferrable_loads'][k]*P_def_bin1[k][i], - sense=plp.LpConstraintGE, - rhs=0) - for i in set_I}) - constraints.update({"constraint_pdef{}_semicont2_{}".format(k, i) : - plp.LpConstraint( - e=P_deferrable[k][i] - self.optim_conf['nominal_power_of_deferrable_loads'][k]*P_def_bin1[k][i], - sense=plp.LpConstraintLE, - rhs=0) - for i in set_I}) - - + if self.optim_conf["treat_deferrable_load_as_semi_cont"][k]: + constraints.update( + { + "constraint_pdef{}_semicont1_{}".format(k, i): plp.LpConstraint( + e=P_deferrable[k][i] + - self.optim_conf["nominal_power_of_deferrable_loads"][k] + * P_def_bin1[k][i], + sense=plp.LpConstraintGE, + rhs=0, + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_pdef{}_semicont2_{}".format(k, i): plp.LpConstraint( + e=P_deferrable[k][i] + - self.optim_conf["nominal_power_of_deferrable_loads"][k] + * P_def_bin1[k][i], + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) + # Treat the number of starts for a deferrable load (old method, kept here just in case) # if self.optim_conf['set_deferrable_load_single_constant'][k]: - # 
constraints.update({"constraint_pdef{}_start1_{}".format(k, i) : + # constraints.update({"constraint_pdef{}_start1_{}".format(k, i) : # plp.LpConstraint( # e=P_deferrable[k][i] - P_def_bin2[k][i]*M, # sense=plp.LpConstraintLE, # rhs=0) # for i in set_I}) - # constraints.update({"constraint_pdef{}_start2_{}".format(k, i): + # constraints.update({"constraint_pdef{}_start2_{}".format(k, i): # plp.LpConstraint( # e=P_def_start[k][i] - P_def_bin2[k][i] + (P_def_bin2[k][i-1] if i-1 >= 0 else 0), # sense=plp.LpConstraintGE, @@ -570,85 +896,184 @@ def create_matrix(input_list, n): # }) # The battery constraints - if self.optim_conf['set_use_battery']: + if self.optim_conf["set_use_battery"]: # Optional constraints to avoid charging the battery from the grid - if self.optim_conf['set_nocharge_from_grid']: - constraints.update({"constraint_nocharge_from_grid_{}".format(i) : - plp.LpConstraint( - e = P_sto_neg[i] + P_PV[i], - sense = plp.LpConstraintGE, - rhs = 0) - for i in set_I}) + if self.optim_conf["set_nocharge_from_grid"]: + constraints.update( + { + "constraint_nocharge_from_grid_{}".format(i): plp.LpConstraint( + e=P_sto_neg[i] + P_PV[i], sense=plp.LpConstraintGE, rhs=0 + ) + for i in set_I + } + ) # Optional constraints to avoid discharging the battery to the grid - if self.optim_conf['set_nodischarge_to_grid']: - constraints.update({"constraint_nodischarge_to_grid_{}".format(i) : - plp.LpConstraint( - e = P_grid_neg[i] + P_PV[i], - sense = plp.LpConstraintGE, - rhs = 0) - for i in set_I}) + if self.optim_conf["set_nodischarge_to_grid"]: + constraints.update( + { + "constraint_nodischarge_to_grid_{}".format(i): plp.LpConstraint( + e=P_grid_neg[i] + P_PV[i], sense=plp.LpConstraintGE, rhs=0 + ) + for i in set_I + } + ) # Limitation of power dynamics in power per unit of time - if self.optim_conf['set_battery_dynamic']: - constraints.update({"constraint_pos_batt_dynamic_max_{}".format(i) : - plp.LpConstraint(e = P_sto_pos[i+1] - P_sto_pos[i], - sense = plp.LpConstraintLE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_max']*self.plant_conf['battery_discharge_power_max']) - for i in range(n-1)}) - constraints.update({"constraint_pos_batt_dynamic_min_{}".format(i) : - plp.LpConstraint(e = P_sto_pos[i+1] - P_sto_pos[i], - sense = plp.LpConstraintGE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_min']*self.plant_conf['battery_discharge_power_max']) - for i in range(n-1)}) - constraints.update({"constraint_neg_batt_dynamic_max_{}".format(i) : - plp.LpConstraint(e = P_sto_neg[i+1] - P_sto_neg[i], - sense = plp.LpConstraintLE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_max']*self.plant_conf['battery_charge_power_max']) - for i in range(n-1)}) - constraints.update({"constraint_neg_batt_dynamic_min_{}".format(i) : - plp.LpConstraint(e = P_sto_neg[i+1] - P_sto_neg[i], - sense = plp.LpConstraintGE, - rhs = self.timeStep*self.optim_conf['battery_dynamic_min']*self.plant_conf['battery_charge_power_max']) - for i in range(n-1)}) + if self.optim_conf["set_battery_dynamic"]: + constraints.update( + { + "constraint_pos_batt_dynamic_max_{}".format( + i + ): plp.LpConstraint( + e=P_sto_pos[i + 1] - P_sto_pos[i], + sense=plp.LpConstraintLE, + rhs=self.timeStep + * self.optim_conf["battery_dynamic_max"] + * self.plant_conf["battery_discharge_power_max"], + ) + for i in range(n - 1) + } + ) + constraints.update( + { + "constraint_pos_batt_dynamic_min_{}".format( + i + ): plp.LpConstraint( + e=P_sto_pos[i + 1] - P_sto_pos[i], + sense=plp.LpConstraintGE, + rhs=self.timeStep + * 
self.optim_conf["battery_dynamic_min"] + * self.plant_conf["battery_discharge_power_max"], + ) + for i in range(n - 1) + } + ) + constraints.update( + { + "constraint_neg_batt_dynamic_max_{}".format( + i + ): plp.LpConstraint( + e=P_sto_neg[i + 1] - P_sto_neg[i], + sense=plp.LpConstraintLE, + rhs=self.timeStep + * self.optim_conf["battery_dynamic_max"] + * self.plant_conf["battery_charge_power_max"], + ) + for i in range(n - 1) + } + ) + constraints.update( + { + "constraint_neg_batt_dynamic_min_{}".format( + i + ): plp.LpConstraint( + e=P_sto_neg[i + 1] - P_sto_neg[i], + sense=plp.LpConstraintGE, + rhs=self.timeStep + * self.optim_conf["battery_dynamic_min"] + * self.plant_conf["battery_charge_power_max"], + ) + for i in range(n - 1) + } + ) # Then the classic battery constraints - constraints.update({"constraint_pstopos_{}".format(i) : - plp.LpConstraint( - e=P_sto_pos[i] - self.plant_conf['battery_discharge_efficiency']*self.plant_conf['battery_discharge_power_max']*E[i], - sense=plp.LpConstraintLE, - rhs=0) - for i in set_I}) - constraints.update({"constraint_pstoneg_{}".format(i) : - plp.LpConstraint( - e=-P_sto_neg[i] - (1/self.plant_conf['battery_charge_efficiency'])*self.plant_conf['battery_charge_power_max']*(1-E[i]), - sense=plp.LpConstraintLE, - rhs=0) - for i in set_I}) - constraints.update({"constraint_socmax_{}".format(i) : - plp.LpConstraint( - e=-plp.lpSum(P_sto_pos[j]*(1/self.plant_conf['battery_discharge_efficiency']) + self.plant_conf['battery_charge_efficiency']*P_sto_neg[j] for j in range(i)), - sense=plp.LpConstraintLE, - rhs=(self.plant_conf['battery_nominal_energy_capacity']/self.timeStep)*(self.plant_conf['battery_maximum_state_of_charge'] - soc_init)) - for i in set_I}) - constraints.update({"constraint_socmin_{}".format(i) : - plp.LpConstraint( - e=plp.lpSum(P_sto_pos[j]*(1/self.plant_conf['battery_discharge_efficiency']) + self.plant_conf['battery_charge_efficiency']*P_sto_neg[j] for j in range(i)), - sense=plp.LpConstraintLE, - rhs=(self.plant_conf['battery_nominal_energy_capacity']/self.timeStep)*(soc_init - self.plant_conf['battery_minimum_state_of_charge'])) - for i in set_I}) - constraints.update({"constraint_socfinal_{}".format(0) : - plp.LpConstraint( - e=plp.lpSum(P_sto_pos[i]*(1/self.plant_conf['battery_discharge_efficiency']) + self.plant_conf['battery_charge_efficiency']*P_sto_neg[i] for i in set_I), - sense=plp.LpConstraintEQ, - rhs=(soc_init - soc_final)*self.plant_conf['battery_nominal_energy_capacity']/self.timeStep) - }) + constraints.update( + { + "constraint_pstopos_{}".format(i): plp.LpConstraint( + e=P_sto_pos[i] + - self.plant_conf["battery_discharge_efficiency"] + * self.plant_conf["battery_discharge_power_max"] + * E[i], + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_pstoneg_{}".format(i): plp.LpConstraint( + e=-P_sto_neg[i] + - (1 / self.plant_conf["battery_charge_efficiency"]) + * self.plant_conf["battery_charge_power_max"] + * (1 - E[i]), + sense=plp.LpConstraintLE, + rhs=0, + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_socmax_{}".format(i): plp.LpConstraint( + e=-plp.lpSum( + P_sto_pos[j] + * (1 / self.plant_conf["battery_discharge_efficiency"]) + + self.plant_conf["battery_charge_efficiency"] + * P_sto_neg[j] + for j in range(i) + ), + sense=plp.LpConstraintLE, + rhs=( + self.plant_conf["battery_nominal_energy_capacity"] + / self.timeStep + ) + * ( + self.plant_conf["battery_maximum_state_of_charge"] + - soc_init + ), + ) + for i in set_I + } + ) + 
constraints.update( + { + "constraint_socmin_{}".format(i): plp.LpConstraint( + e=plp.lpSum( + P_sto_pos[j] + * (1 / self.plant_conf["battery_discharge_efficiency"]) + + self.plant_conf["battery_charge_efficiency"] + * P_sto_neg[j] + for j in range(i) + ), + sense=plp.LpConstraintLE, + rhs=( + self.plant_conf["battery_nominal_energy_capacity"] + / self.timeStep + ) + * ( + soc_init + - self.plant_conf["battery_minimum_state_of_charge"] + ), + ) + for i in set_I + } + ) + constraints.update( + { + "constraint_socfinal_{}".format(0): plp.LpConstraint( + e=plp.lpSum( + P_sto_pos[i] + * (1 / self.plant_conf["battery_discharge_efficiency"]) + + self.plant_conf["battery_charge_efficiency"] + * P_sto_neg[i] + for i in set_I + ), + sense=plp.LpConstraintEQ, + rhs=(soc_init - soc_final) + * self.plant_conf["battery_nominal_energy_capacity"] + / self.timeStep, + ) + } + ) opt_model.constraints = constraints ## Finally, we call the solver to solve our optimization model: # solving with default solver CBC - if self.lp_solver == 'PULP_CBC_CMD': + if self.lp_solver == "PULP_CBC_CMD": opt_model.solve(PULP_CBC_CMD(msg=0)) - elif self.lp_solver == 'GLPK_CMD': + elif self.lp_solver == "GLPK_CMD": opt_model.solve(GLPK_CMD(msg=0)) - elif self.lp_solver == 'COIN_CMD': + elif self.lp_solver == "COIN_CMD": opt_model.solve(COIN_CMD(msg=0, path=self.lp_solver_path)) else: self.logger.warning("Solver %s unknown, using default", self.lp_solver) @@ -661,65 +1086,133 @@ def create_matrix(input_list, n): self.logger.warning("Cost function cannot be evaluated") return else: - self.logger.info("Total value of the Cost function = %.02f", plp.value(opt_model.objective)) + self.logger.info( + "Total value of the Cost function = %.02f", + plp.value(opt_model.objective), + ) # Build results Dataframe opt_tp = pd.DataFrame() opt_tp["P_PV"] = [P_PV[i] for i in set_I] opt_tp["P_Load"] = [P_load[i] for i in set_I] - for k in range(self.optim_conf['number_of_deferrable_loads']): - opt_tp["P_deferrable{}".format(k)] = [P_deferrable[k][i].varValue for i in set_I] + for k in range(self.optim_conf["number_of_deferrable_loads"]): + opt_tp["P_deferrable{}".format(k)] = [ + P_deferrable[k][i].varValue for i in set_I + ] opt_tp["P_grid_pos"] = [P_grid_pos[i].varValue for i in set_I] opt_tp["P_grid_neg"] = [P_grid_neg[i].varValue for i in set_I] - opt_tp["P_grid"] = [P_grid_pos[i].varValue + P_grid_neg[i].varValue for i in set_I] - if self.optim_conf['set_use_battery']: - opt_tp["P_batt"] = [P_sto_pos[i].varValue + P_sto_neg[i].varValue for i in set_I] - SOC_opt_delta = [(P_sto_pos[i].varValue*(1/self.plant_conf['battery_discharge_efficiency']) + \ - self.plant_conf['battery_charge_efficiency']*P_sto_neg[i].varValue)*( - self.timeStep/(self.plant_conf['battery_nominal_energy_capacity'])) for i in set_I] + opt_tp["P_grid"] = [ + P_grid_pos[i].varValue + P_grid_neg[i].varValue for i in set_I + ] + if self.optim_conf["set_use_battery"]: + opt_tp["P_batt"] = [ + P_sto_pos[i].varValue + P_sto_neg[i].varValue for i in set_I + ] + SOC_opt_delta = [ + ( + P_sto_pos[i].varValue + * (1 / self.plant_conf["battery_discharge_efficiency"]) + + self.plant_conf["battery_charge_efficiency"] + * P_sto_neg[i].varValue + ) + * (self.timeStep / (self.plant_conf["battery_nominal_energy_capacity"])) + for i in set_I + ] SOCinit = copy.copy(soc_init) SOC_opt = [] for i in set_I: SOC_opt.append(SOCinit - SOC_opt_delta[i]) SOCinit = SOC_opt[i] opt_tp["SOC_opt"] = SOC_opt - if self.plant_conf['inverter_is_hybrid']: + if self.plant_conf["inverter_is_hybrid"]: 
opt_tp["P_hybrid_inverter"] = [P_hybrid_inverter[i].varValue for i in set_I] - if self.plant_conf['compute_curtailment']: + if self.plant_conf["compute_curtailment"]: opt_tp["P_PV_curtailment"] = [P_PV_curtailment[i].varValue for i in set_I] opt_tp.index = data_opt.index # Lets compute the optimal cost function P_def_sum_tp = [] for i in set_I: - P_def_sum_tp.append(sum(P_deferrable[k][i].varValue for k in range(self.optim_conf['number_of_deferrable_loads']))) + P_def_sum_tp.append( + sum( + P_deferrable[k][i].varValue + for k in range(self.optim_conf["number_of_deferrable_loads"]) + ) + ) opt_tp["unit_load_cost"] = [unit_load_cost[i] for i in set_I] opt_tp["unit_prod_price"] = [unit_prod_price[i] for i in set_I] - if self.optim_conf['set_total_pv_sell']: - opt_tp["cost_profit"] = [-0.001*self.timeStep*(unit_load_cost[i]*(P_load[i] + P_def_sum_tp[i]) + \ - unit_prod_price[i]*P_grid_neg[i].varValue) for i in set_I] + if self.optim_conf["set_total_pv_sell"]: + opt_tp["cost_profit"] = [ + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * (P_load[i] + P_def_sum_tp[i]) + + unit_prod_price[i] * P_grid_neg[i].varValue + ) + for i in set_I + ] else: - opt_tp["cost_profit"] = [-0.001*self.timeStep*(unit_load_cost[i]*P_grid_pos[i].varValue + \ - unit_prod_price[i]*P_grid_neg[i].varValue) for i in set_I] + opt_tp["cost_profit"] = [ + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * P_grid_pos[i].varValue + + unit_prod_price[i] * P_grid_neg[i].varValue + ) + for i in set_I + ] - if self.costfun == 'profit': - if self.optim_conf['set_total_pv_sell']: - opt_tp["cost_fun_profit"] = [-0.001*self.timeStep*(unit_load_cost[i]*(P_load[i] + P_def_sum_tp[i]) + \ - unit_prod_price[i]*P_grid_neg[i].varValue) for i in set_I] + if self.costfun == "profit": + if self.optim_conf["set_total_pv_sell"]: + opt_tp["cost_fun_profit"] = [ + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * (P_load[i] + P_def_sum_tp[i]) + + unit_prod_price[i] * P_grid_neg[i].varValue + ) + for i in set_I + ] else: - opt_tp["cost_fun_profit"] = [-0.001*self.timeStep*(unit_load_cost[i]*P_grid_pos[i].varValue + \ - unit_prod_price[i]*P_grid_neg[i].varValue) for i in set_I] - elif self.costfun == 'cost': - if self.optim_conf['set_total_pv_sell']: - opt_tp["cost_fun_cost"] = [-0.001*self.timeStep*unit_load_cost[i]*(P_load[i] + P_def_sum_tp[i]) for i in set_I] + opt_tp["cost_fun_profit"] = [ + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * P_grid_pos[i].varValue + + unit_prod_price[i] * P_grid_neg[i].varValue + ) + for i in set_I + ] + elif self.costfun == "cost": + if self.optim_conf["set_total_pv_sell"]: + opt_tp["cost_fun_cost"] = [ + -0.001 + * self.timeStep + * unit_load_cost[i] + * (P_load[i] + P_def_sum_tp[i]) + for i in set_I + ] else: - opt_tp["cost_fun_cost"] = [-0.001*self.timeStep*unit_load_cost[i]*P_grid_pos[i].varValue for i in set_I] - elif self.costfun == 'self-consumption': - if type_self_conso == 'maxmin': - opt_tp["cost_fun_selfcons"] = [-0.001*self.timeStep*unit_load_cost[i]*SC[i].varValue for i in set_I] - elif type_self_conso == 'bigm': - opt_tp["cost_fun_selfcons"] = [-0.001*self.timeStep*(unit_load_cost[i]*P_grid_pos[i].varValue + \ - unit_prod_price[i]*P_grid_neg[i].varValue) for i in set_I] + opt_tp["cost_fun_cost"] = [ + -0.001 * self.timeStep * unit_load_cost[i] * P_grid_pos[i].varValue + for i in set_I + ] + elif self.costfun == "self-consumption": + if type_self_conso == "maxmin": + opt_tp["cost_fun_selfcons"] = [ + -0.001 * self.timeStep * unit_load_cost[i] * SC[i].varValue + for i in set_I + ] + elif 
type_self_conso == "bigm": + opt_tp["cost_fun_selfcons"] = [ + -0.001 + * self.timeStep + * ( + unit_load_cost[i] * P_grid_pos[i].varValue + + unit_prod_price[i] * P_grid_neg[i].varValue + ) + for i in set_I + ] else: self.logger.error("The cost function specified type is not valid") @@ -728,16 +1221,31 @@ def create_matrix(input_list, n): # Debug variables if debug: - for k in range(self.optim_conf['number_of_deferrable_loads']): + for k in range(self.optim_conf["number_of_deferrable_loads"]): opt_tp[f"P_def_start_{k}"] = [P_def_start[k][i].varValue for i in set_I] opt_tp[f"P_def_bin2_{k}"] = [P_def_bin2[k][i].varValue for i in set_I] for i, predicted_temp in predicted_temps.items(): - opt_tp[f"predicted_temp_heater{i}"] = pd.Series([round(pt.value(), 2) if isinstance(pt, plp.LpAffineExpression) else pt for pt in predicted_temp], index=opt_tp.index) - opt_tp[f"target_temp_heater{i}"] = pd.Series(self.optim_conf["def_load_config"][i]['thermal_config']["desired_temperatures"], index=opt_tp.index) + opt_tp[f"predicted_temp_heater{i}"] = pd.Series( + [ + round(pt.value(), 2) + if isinstance(pt, plp.LpAffineExpression) + else pt + for pt in predicted_temp + ], + index=opt_tp.index, + ) + opt_tp[f"target_temp_heater{i}"] = pd.Series( + self.optim_conf["def_load_config"][i]["thermal_config"][ + "desired_temperatures" + ], + index=opt_tp.index, + ) return opt_tp - def perform_perfect_forecast_optim(self, df_input_data: pd.DataFrame, days_list: pd.date_range) -> pd.DataFrame: + def perform_perfect_forecast_optim( + self, df_input_data: pd.DataFrame, days_list: pd.date_range + ) -> pd.DataFrame: r""" Perform an optimization on historical data (perfectly known PV production). @@ -753,21 +1261,33 @@ def perform_perfect_forecast_optim(self, df_input_data: pd.DataFrame, days_list: """ self.logger.info("Perform optimization for perfect forecast scenario") - self.days_list_tz = days_list.tz_convert(self.time_zone).round(self.freq)[:-1] # Converted to tz and without the current day (today) + self.days_list_tz = days_list.tz_convert(self.time_zone).round(self.freq)[ + :-1 + ] # Converted to tz and without the current day (today) self.opt_res = pd.DataFrame() for day in self.days_list_tz: - self.logger.info("Solving for day: "+str(day.day)+"-"+str(day.month)+"-"+str(day.year)) + self.logger.info( + "Solving for day: " + + str(day.day) + + "-" + + str(day.month) + + "-" + + str(day.year) + ) # Prepare data day_start = day.isoformat() - day_end = (day+self.time_delta-self.freq).isoformat() - data_tp = df_input_data.copy().loc[pd.date_range(start=day_start, end=day_end, freq=self.freq)] + day_end = (day + self.time_delta - self.freq).isoformat() + data_tp = df_input_data.copy().loc[ + pd.date_range(start=day_start, end=day_end, freq=self.freq) + ] P_PV = data_tp[self.var_PV].values P_load = data_tp[self.var_load_new].values - unit_load_cost = data_tp[self.var_load_cost].values # €/kWh - unit_prod_price = data_tp[self.var_prod_price].values # €/kWh + unit_load_cost = data_tp[self.var_load_cost].values # €/kWh + unit_prod_price = data_tp[self.var_prod_price].values # €/kWh # Call optimization function - opt_tp = self.perform_optimization(data_tp, P_PV, P_load, - unit_load_cost, unit_prod_price) + opt_tp = self.perform_optimization( + data_tp, P_PV, P_load, unit_load_cost, unit_prod_price + ) if len(self.opt_res) == 0: self.opt_res = opt_tp else: @@ -775,8 +1295,9 @@ def perform_perfect_forecast_optim(self, df_input_data: pd.DataFrame, days_list: return self.opt_res - def perform_dayahead_forecast_optim(self, 
df_input_data: pd.DataFrame, - P_PV: pd.Series, P_load: pd.Series) -> pd.DataFrame: + def perform_dayahead_forecast_optim( + self, df_input_data: pd.DataFrame, P_PV: pd.Series, P_load: pd.Series + ) -> pd.DataFrame: r""" Perform a day-ahead optimization task using real forecast data. \ This type of optimization is intented to be launched once a day. @@ -794,19 +1315,30 @@ def perform_dayahead_forecast_optim(self, df_input_data: pd.DataFrame, """ self.logger.info("Perform optimization for the day-ahead") - unit_load_cost = df_input_data[self.var_load_cost].values # €/kWh - unit_prod_price = df_input_data[self.var_prod_price].values # €/kWh + unit_load_cost = df_input_data[self.var_load_cost].values # €/kWh + unit_prod_price = df_input_data[self.var_prod_price].values # €/kWh # Call optimization function - self.opt_res = self.perform_optimization(df_input_data, P_PV.values.ravel(), - P_load.values.ravel(), - unit_load_cost, unit_prod_price) + self.opt_res = self.perform_optimization( + df_input_data, + P_PV.values.ravel(), + P_load.values.ravel(), + unit_load_cost, + unit_prod_price, + ) return self.opt_res - def perform_naive_mpc_optim(self, df_input_data: pd.DataFrame, P_PV: pd.Series, P_load: pd.Series, - prediction_horizon: int, soc_init: Optional[float] = None, soc_final: Optional[float] = None, - def_total_hours: Optional[list] = None, - def_start_timestep: Optional[list] = None, - def_end_timestep: Optional[list] = None) -> pd.DataFrame: + def perform_naive_mpc_optim( + self, + df_input_data: pd.DataFrame, + P_PV: pd.Series, + P_load: pd.Series, + prediction_horizon: int, + soc_init: Optional[float] = None, + soc_final: Optional[float] = None, + def_total_hours: Optional[list] = None, + def_start_timestep: Optional[list] = None, + def_end_timestep: Optional[list] = None, + ) -> pd.DataFrame: r""" Perform a naive approach to a Model Predictive Control (MPC). 
\ This implementaion is naive because we are not using the formal formulation \ @@ -843,24 +1375,38 @@ def perform_naive_mpc_optim(self, df_input_data: pd.DataFrame, P_PV: pd.Series, """ self.logger.info("Perform an iteration of a naive MPC controller") if prediction_horizon < 5: - self.logger.error("Set the MPC prediction horizon to at least 5 times the optimization time step") + self.logger.error( + "Set the MPC prediction horizon to at least 5 times the optimization time step" + ) return pd.DataFrame() else: - df_input_data = copy.deepcopy(df_input_data)[df_input_data.index[0]:df_input_data.index[prediction_horizon-1]] - unit_load_cost = df_input_data[self.var_load_cost].values # €/kWh - unit_prod_price = df_input_data[self.var_prod_price].values # €/kWh + df_input_data = copy.deepcopy(df_input_data)[ + df_input_data.index[0] : df_input_data.index[prediction_horizon - 1] + ] + unit_load_cost = df_input_data[self.var_load_cost].values # €/kWh + unit_prod_price = df_input_data[self.var_prod_price].values # €/kWh # Call optimization function - self.opt_res = self.perform_optimization(df_input_data, P_PV.values.ravel(), P_load.values.ravel(), - unit_load_cost, unit_prod_price, soc_init=soc_init, - soc_final=soc_final, def_total_hours=def_total_hours, - def_start_timestep=def_start_timestep, def_end_timestep=def_end_timestep) + self.opt_res = self.perform_optimization( + df_input_data, + P_PV.values.ravel(), + P_load.values.ravel(), + unit_load_cost, + unit_prod_price, + soc_init=soc_init, + soc_final=soc_final, + def_total_hours=def_total_hours, + def_start_timestep=def_start_timestep, + def_end_timestep=def_end_timestep, + ) return self.opt_res @staticmethod - def validate_def_timewindow(start: int, end: int, min_steps: int, window: int) -> Tuple[int,int,str]: + def validate_def_timewindow( + start: int, end: int, min_steps: int, window: int + ) -> Tuple[int, int, str]: r""" Helper function to validate (and if necessary: correct) the defined optimization window of a deferrable load. - + :param start: Start timestep of the optimization window of the deferrable load :type start: int :param end: End timestep of the optimization window of the deferrable load @@ -887,7 +1433,7 @@ def validate_def_timewindow(start: int, end: int, min_steps: int, window: int) - end_validated = max(0, min(window, end)) if end_validated > 0: # If the available timeframe is shorter than the number of timesteps needed to meet the hours to operate (def_total_hours), issue a warning. - if (end_validated-start_validated) < min_steps: + if (end_validated - start_validated) < min_steps: warning = "Available timeframe is shorter than the specified number of hours to operate. Optimization will fail." else: warning = "Invalid timeframe for deferrable load (start timestep is not <= end timestep). Continuing optimization without timewindow constraint." 
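For reference, the thermal deferrable-load constraints reformatted above rest on a simple first-order temperature recursion. The sketch below is not part of the diff; it reproduces that recursion in plain Python so the heating_rate / cooling_constant parameters are easier to interpret, and every numeric value is an illustrative assumption.

# Standalone sketch of the first-order thermal model used by the thermal
# deferrable-load constraints above. Variable names mirror perform_optimization;
# the numbers are made up for illustration only.
def predict_temperature(p_deferrable, start_temperature, heating_rate,
                        cooling_constant, nominal_power, time_step,
                        outdoor_temperature_forecast):
    """Propagate the predicted indoor temperature over the optimization horizon."""
    predicted_temp = [start_temperature]
    for i in range(1, len(p_deferrable)):
        # Heating contribution of the load during the previous step
        heating = p_deferrable[i - 1] * (heating_rate * time_step / nominal_power)
        # Losses proportional to the indoor/outdoor temperature difference
        cooling = cooling_constant * (
            predicted_temp[i - 1] - outdoor_temperature_forecast[i - 1]
        )
        predicted_temp.append(predicted_temp[i - 1] + heating - cooling)
    return predicted_temp

# Example: a 1 kW heater running for the first two 30-minute steps out of four.
print(predict_temperature(
    p_deferrable=[1000, 1000, 0, 0],
    start_temperature=20.0,
    heating_rate=5.0,        # degC gained per hour at nominal power (assumed)
    cooling_constant=0.1,    # fraction of the indoor/outdoor delta lost per step (assumed)
    nominal_power=1000,      # W
    time_step=0.5,           # hours
    outdoor_temperature_forecast=[10.0, 10.0, 10.0, 10.0],
))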
diff --git a/src/emhass/retrieve_hass.py b/src/emhass/retrieve_hass.py index 71f05a6e..2e4e5fb9 100644 --- a/src/emhass/retrieve_hass.py +++ b/src/emhass/retrieve_hass.py @@ -1,13 +1,14 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import json import copy -import os -import pathlib import datetime +import json import logging +import os +import pathlib from typing import Optional + import numpy as np import pandas as pd from requests import get, post @@ -32,9 +33,17 @@ class RetrieveHass: """ - def __init__(self, hass_url: str, long_lived_token: str, freq: pd.Timedelta, - time_zone: datetime.timezone, params: str, emhass_conf: dict, logger: logging.Logger, - get_data_from_file: Optional[bool] = False) -> None: + def __init__( + self, + hass_url: str, + long_lived_token: str, + freq: pd.Timedelta, + time_zone: datetime.timezone, + params: str, + emhass_conf: dict, + logger: logging.Logger, + get_data_from_file: Optional[bool] = False, + ) -> None: """ Define constructor for RetrieveHass class. @@ -75,19 +84,24 @@ def __init__(self, hass_url: str, long_lived_token: str, freq: pd.Timedelta, def get_ha_config(self): """ Extract some configuration data from HA. - + """ headers = { "Authorization": "Bearer " + self.long_lived_token, - "content-type": "application/json" - } - url = self.hass_url+"api/config" + "content-type": "application/json", + } + url = self.hass_url + "api/config" response_config = get(url, headers=headers) self.ha_config = response_config.json() - - def get_data(self, days_list: pd.date_range, var_list: list, - minimal_response: Optional[bool] = False, significant_changes_only: Optional[bool] = False, - test_url: Optional[str] = "empty") -> None: + + def get_data( + self, + days_list: pd.date_range, + var_list: list, + minimal_response: Optional[bool] = False, + significant_changes_only: Optional[bool] = False, + test_url: Optional[str] = "empty", + ) -> None: r""" Retrieve the actual data from hass. 
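The get_ha_config method reformatted above uses the standard Home Assistant REST pattern: a GET against /api/config authenticated with a long-lived access token. A minimal standalone sketch of that call follows; the URL and token are placeholders, not values from the diff.

# Standalone sketch of the REST call pattern get_ha_config relies on.
from requests import get

hass_url = "http://localhost:8123/"          # assumed local Home Assistant instance
long_lived_token = "YOUR_LONG_LIVED_TOKEN"   # placeholder

headers = {
    "Authorization": "Bearer " + long_lived_token,
    "content-type": "application/json",
}
response = get(hass_url + "api/config", headers=headers)
ha_config = response.json()   # instance-wide settings, e.g. ha_config["time_zone"]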
@@ -113,15 +127,17 @@ def get_data(self, days_list: pd.date_range, var_list: list, self.logger.info("Retrieve hass get data method initiated...") headers = { "Authorization": "Bearer " + self.long_lived_token, - "content-type": "application/json" - } + "content-type": "application/json", + } # Looping on each day from days list self.df_final = pd.DataFrame() x = 0 # iterate based on days for day in days_list: for i, var in enumerate(var_list): if test_url == "empty": - if (self.hass_url == "http://supervisor/core/api"): # If we are using the supervisor API + if ( + self.hass_url == "http://supervisor/core/api" + ): # If we are using the supervisor API url = ( self.hass_url + "/history/period/" @@ -139,7 +155,7 @@ def get_data(self, days_list: pd.date_range, var_list: list, ) if minimal_response: # A support for minimal response url = url + "?minimal_response" - if (significant_changes_only): # And for signicant changes only (check the HASS restful API for more info) + if significant_changes_only: # And for signicant changes only (check the HASS restful API for more info) url = url + "?significant_changes_only" else: url = test_url @@ -172,9 +188,19 @@ def get_data(self, days_list: pd.date_range, var_list: list, data = response.json()[0] except IndexError: if x == 0: - self.logger.error("The retrieved JSON is empty, A sensor:" + var + " may have 0 days of history, passed sensor may not be correct, or days to retrieve is set too heigh") + self.logger.error( + "The retrieved JSON is empty, A sensor:" + + var + + " may have 0 days of history, passed sensor may not be correct, or days to retrieve is set too heigh" + ) else: - self.logger.error("The retrieved JSON is empty for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)") + self.logger.error( + "The retrieved JSON is empty for day:" + + str(day) + + ", days_to_retrieve may be larger than the recorded history of sensor:" + + var + + " (check your recorder settings)" + ) return False df_raw = pd.DataFrame.from_dict(data) # self.logger.info(str(df_raw)) @@ -186,17 +212,41 @@ def get_data(self, days_list: pd.date_range, var_list: list, + " may have 0 days of history or passed sensor may not be correct" ) else: - self.logger.error("Retrieved empty Dataframe for day:"+ str(day) +", days_to_retrieve may be larger than the recorded history of sensor:" + var + " (check your recorder settings)") + self.logger.error( + "Retrieved empty Dataframe for day:" + + str(day) + + ", days_to_retrieve may be larger than the recorded history of sensor:" + + var + + " (check your recorder settings)" + ) return False # self.logger.info(self.freq.seconds) - if len(df_raw) < ((60 / (self.freq.seconds / 60)) * 24) and x != len(days_list) -1: #check if there is enough Dataframes for passed frequency per day (not inc current day) - self.logger.debug("sensor:" + var + " retrieved Dataframe count: " + str(len(df_raw)) + ", on day: " + str(day) + ". 
This is less than freq value passed: " + str(self.freq)) - if i == 0: # Defining the DataFrame container - from_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").min() - to_date = pd.to_datetime(df_raw['last_changed'], format="ISO8601").max() - ts = pd.to_datetime(pd.date_range(start=from_date, end=to_date, freq=self.freq), - format='%Y-%d-%m %H:%M').round(self.freq, ambiguous='infer', nonexistent='shift_forward') - df_day = pd.DataFrame(index = ts) + if ( + len(df_raw) < ((60 / (self.freq.seconds / 60)) * 24) + and x != len(days_list) - 1 + ): # check if there is enough Dataframes for passed frequency per day (not inc current day) + self.logger.debug( + "sensor:" + + var + + " retrieved Dataframe count: " + + str(len(df_raw)) + + ", on day: " + + str(day) + + ". This is less than freq value passed: " + + str(self.freq) + ) + if i == 0: # Defining the DataFrame container + from_date = pd.to_datetime( + df_raw["last_changed"], format="ISO8601" + ).min() + to_date = pd.to_datetime( + df_raw["last_changed"], format="ISO8601" + ).max() + ts = pd.to_datetime( + pd.date_range(start=from_date, end=to_date, freq=self.freq), + format="%Y-%d-%m %H:%M", + ).round(self.freq, ambiguous="infer", nonexistent="shift_forward") + df_day = pd.DataFrame(index=ts) # Caution with undefined string data: unknown, unavailable, etc. df_tp = ( df_raw.copy()[["state"]] @@ -212,16 +262,26 @@ def get_data(self, days_list: pd.date_range, var_list: list, df_tp = df_tp.resample(self.freq).mean() df_day = pd.concat([df_day, df_tp], axis=1) self.df_final = pd.concat([self.df_final, df_day], axis=0) - x += 1 + x += 1 self.df_final = set_df_index_freq(self.df_final) if self.df_final.index.freq != self.freq: - self.logger.error("The inferred freq:" + str(self.df_final.index.freq) + " from data is not equal to the defined freq in passed:" + str(self.freq)) + self.logger.error( + "The inferred freq:" + + str(self.df_final.index.freq) + + " from data is not equal to the defined freq in passed:" + + str(self.freq) + ) return False return True - - - def prepare_data(self, var_load: str, load_negative: Optional[bool] = False, set_zero_min: Optional[bool] = True, - var_replace_zero: Optional[list] = None, var_interp: Optional[list] = None) -> None: + + def prepare_data( + self, + var_load: str, + load_negative: Optional[bool] = False, + set_zero_min: Optional[bool] = True, + var_replace_zero: Optional[list] = None, + var_interp: Optional[list] = None, + ) -> None: r""" Apply some data treatment in preparation for the optimization task. 
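Within get_data, each sensor's raw history is coerced to numeric and averaged onto the optimization frequency before being concatenated into df_final. The following small sketch, with made-up data, illustrates that resampling step outside the diff.

# Standalone sketch of the per-sensor resampling pattern used by get_data.
import numpy as np
import pandas as pd

freq = pd.Timedelta("30min")
df_raw = pd.DataFrame({
    "last_changed": ["2024-01-01T00:05:00+00:00",
                     "2024-01-01T00:20:00+00:00",
                     "2024-01-01T00:40:00+00:00"],
    "state": ["100", "unknown", "300"],
})
# Undefined string states become NaN, then the states are averaged per time slot.
df_tp = df_raw[["state"]].replace(["unknown", "unavailable", ""], np.nan).astype(float)
df_tp.index = pd.to_datetime(df_raw["last_changed"], format="ISO8601")
df_tp = df_tp.resample(freq).mean()
print(df_tp)   # one averaged value per 30-minute slot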
@@ -298,8 +358,15 @@ def prepare_data(self, var_load: str, load_negative: Optional[bool] = False, set return True @staticmethod - def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_measurement: str, - friendly_name: str, list_name: str, state: float) -> dict: + def get_attr_data_dict( + data_df: pd.DataFrame, + idx: int, + entity_id: str, + unit_of_measurement: str, + friendly_name: str, + list_name: str, + state: float, + ) -> dict: list_df = copy.deepcopy(data_df).loc[data_df.index[idx] :].reset_index() list_df.columns = ["timestamps", entity_id] ts_list = [str(i) for i in list_df["timestamps"].tolist()] @@ -320,11 +387,20 @@ def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_ } return data - - def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_measurement: str, - friendly_name: str, type_var: str, from_mlforecaster: Optional[bool] = False, - publish_prefix: Optional[str] = "", save_entities: Optional[bool] = False, - logger_levels: Optional[str] = "info", dont_post: Optional[bool] = False) -> None: + def post_data( + self, + data_df: pd.DataFrame, + idx: int, + entity_id: str, + unit_of_measurement: str, + friendly_name: str, + type_var: str, + from_mlforecaster: Optional[bool] = False, + publish_prefix: Optional[str] = "", + save_entities: Optional[bool] = False, + logger_levels: Optional[str] = "info", + dont_post: Optional[bool] = False, + ) -> None: r""" Post passed data to hass. @@ -364,10 +440,10 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_mea headers = { "Authorization": "Bearer " + self.long_lived_token, "content-type": "application/json", - } + } # Preparing the data dict to be published if type_var == "cost_fun": - if isinstance(data_df.iloc[0],pd.Series): #if Series extract + if isinstance(data_df.iloc[0], pd.Series): # if Series extract data_df = data_df.iloc[:, 0] state = np.round(data_df.sum(), 2) elif type_var == "unit_load_cost" or type_var == "unit_prod_price": @@ -379,29 +455,85 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_mea else: state = np.round(data_df.loc[data_df.index[idx]], 2) if type_var == "power": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "forecasts", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "forecasts", + state, + ) elif type_var == "deferrable": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "deferrables_schedule", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "deferrables_schedule", + state, + ) elif type_var == "temperature": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "predicted_temperatures", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "predicted_temperatures", + state, + ) elif type_var == "batt": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "battery_scheduled_power", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "battery_scheduled_power", + state, + ) elif type_var == "SOC": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, 
- friendly_name, "battery_scheduled_soc", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "battery_scheduled_soc", + state, + ) elif type_var == "unit_load_cost": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "unit_load_cost_forecasts", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "unit_load_cost_forecasts", + state, + ) elif type_var == "unit_prod_price": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "unit_prod_price_forecasts", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "unit_prod_price_forecasts", + state, + ) elif type_var == "mlforecaster": - data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement, - friendly_name, "scheduled_forecast", state) + data = RetrieveHass.get_attr_data_dict( + data_df, + idx, + entity_id, + unit_of_measurement, + friendly_name, + "scheduled_forecast", + state, + ) elif type_var == "optim_status": data = { "state": state, @@ -428,8 +560,10 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_mea } # Actually post the data if self.get_data_from_file or dont_post: + class response: pass + response.status_code = 200 response.ok = True else: @@ -437,42 +571,55 @@ class response: # Treating the response status and posting them on the logger if response.ok: - if logger_levels == "DEBUG": - self.logger.debug("Successfully posted to " + entity_id + " = " + str(state)) + self.logger.debug( + "Successfully posted to " + entity_id + " = " + str(state) + ) else: - self.logger.info("Successfully posted to " + entity_id + " = " + str(state)) + self.logger.info( + "Successfully posted to " + entity_id + " = " + str(state) + ) # If save entities is set, save entity data to /data_path/entities - if (save_entities): - entities_path = self.emhass_conf['data_path'] / "entities" - + if save_entities: + entities_path = self.emhass_conf["data_path"] / "entities" + # Clarify folder exists pathlib.Path(entities_path).mkdir(parents=True, exist_ok=True) - + # Save entity data to json file - result = data_df.to_json(index="timestamp", orient='index', date_unit='s', date_format='iso') + result = data_df.to_json( + index="timestamp", orient="index", date_unit="s", date_format="iso" + ) parsed = json.loads(result) - with open(entities_path / (entity_id + ".json"), "w") as file: + with open(entities_path / (entity_id + ".json"), "w") as file: json.dump(parsed, file, indent=4) - + # Save the required metadata to json file if os.path.isfile(entities_path / "metadata.json"): with open(entities_path / "metadata.json", "r") as file: - metadata = json.load(file) + metadata = json.load(file) else: metadata = {} - with open(entities_path / "metadata.json", "w") as file: - # Save entity metadata, key = entity_id - metadata[entity_id] = {'name': data_df.name, 'unit_of_measurement': unit_of_measurement,'friendly_name': friendly_name,'type_var': type_var, 'optimization_time_step': int(self.freq.seconds / 60)} - + with open(entities_path / "metadata.json", "w") as file: + # Save entity metadata, key = entity_id + metadata[entity_id] = { + "name": data_df.name, + "unit_of_measurement": unit_of_measurement, + "friendly_name": friendly_name, + "type_var": type_var, + "optimization_time_step": 
int(self.freq.seconds / 60), + } + # Find lowest frequency to set for continual loop freq - if metadata.get("lowest_time_step",None) == None or metadata["lowest_time_step"] > int(self.freq.seconds / 60): + if metadata.get("lowest_time_step", None) == None or metadata[ + "lowest_time_step" + ] > int(self.freq.seconds / 60): metadata["lowest_time_step"] = int(self.freq.seconds / 60) - json.dump(metadata,file, indent=4) + json.dump(metadata, file, indent=4) + + self.logger.debug("Saved " + entity_id + " to json file") - self.logger.debug("Saved " + entity_id + " to json file") - else: self.logger.warning( "The status code for received curl command response is: " diff --git a/src/emhass/utils.py b/src/emhass/utils.py index 0c769bfd..6a7e5013 100644 --- a/src/emhass/utils.py +++ b/src/emhass/utils.py @@ -1,26 +1,26 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- - +import ast +import copy import csv -import os -from typing import Tuple, Optional -from datetime import datetime, timedelta, timezone +import json import logging +import os import pathlib -import json -import copy +from datetime import datetime, timedelta, timezone +from typing import Optional, Tuple + import numpy as np import pandas as pd -from requests import get -import yaml +import plotly.express as px import pytz -import ast +import yaml +from requests import get -import plotly.express as px +from emhass.machine_learning_forecaster import MLForecaster pd.options.plotting.backend = "plotly" -from emhass.machine_learning_forecaster import MLForecaster def get_root(file: str, num_parent: Optional[int] = 3) -> str: """ @@ -44,8 +44,12 @@ def get_root(file: str, num_parent: Optional[int] = 3) -> str: return root -def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] = True, - logging_level: Optional[str] = "DEBUG") -> Tuple[logging.Logger, logging.StreamHandler]: +def get_logger( + fun_name: str, + emhass_conf: dict, + save_to_file: Optional[bool] = True, + logging_level: Optional[str] = "DEBUG", +) -> Tuple[logging.Logger, logging.StreamHandler]: """ Create a simple logger object. @@ -64,10 +68,10 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] = logger.propagate = True logger.fileSetting = save_to_file if save_to_file: - if os.path.isdir(emhass_conf['data_path']): - ch = logging.FileHandler(emhass_conf['data_path'] / 'logger_emhass.log') + if os.path.isdir(emhass_conf["data_path"]): + ch = logging.FileHandler(emhass_conf["data_path"] / "logger_emhass.log") else: - raise Exception("Unable to access data_path: "+emhass_conf['data_path']) + raise Exception("Unable to access data_path: " + emhass_conf["data_path"]) else: ch = logging.StreamHandler() if logging_level == "DEBUG": @@ -94,8 +98,12 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] = return logger, ch -def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinfo, timedelta_days: Optional[int] = 0 - ) -> pd.core.indexes.datetimes.DatetimeIndex: +def get_forecast_dates( + freq: int, + delta_forecast: int, + time_zone: datetime.tzinfo, + timedelta_days: Optional[int] = 0, +) -> pd.core.indexes.datetimes.DatetimeIndex: """ Get the date_range list of the needed future dates using the delta_forecast parameter. 
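get_forecast_dates, whose signature is reformatted above, builds the timezone-aware index that the runtime-parameter handling further below validates forecast lists against. A minimal sketch of the same computation with assumed inputs (30-minute steps, a one-day horizon, Europe/Paris) is given here for orientation.

# Standalone sketch of the forecast date index construction; inputs are assumptions.
from datetime import datetime

import pandas as pd

freq = pd.to_timedelta(30, "minutes")
delta_forecast = 1              # days
time_zone = "Europe/Paris"      # assumed time zone

start_forecast = pd.Timestamp(datetime.now()).replace(
    hour=0, minute=0, second=0, microsecond=0
)
end_forecast = start_forecast + pd.Timedelta(days=delta_forecast)
forecast_dates = (
    pd.date_range(start=start_forecast, end=end_forecast - freq, freq=freq, tz=time_zone)
    .tz_convert("utc")
    .round(freq, ambiguous="infer", nonexistent="shift_forward")
    .tz_convert(time_zone)
)
print(len(forecast_dates))   # 48 steps for a 1-day horizon at 30 minutes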
@@ -110,17 +118,36 @@ def get_forecast_dates(freq: int, delta_forecast: int, time_zone: datetime.tzinf """ freq = pd.to_timedelta(freq, "minutes") - start_forecast = pd.Timestamp(datetime.now()).replace(hour=0, minute=0, second=0, microsecond=0) - end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace(microsecond=0) - forecast_dates = pd.date_range(start=start_forecast, - end=end_forecast+timedelta(days=timedelta_days)-freq, - freq=freq, tz=time_zone).tz_convert('utc').round(freq, ambiguous='infer', nonexistent='shift_forward').tz_convert(time_zone) + start_forecast = pd.Timestamp(datetime.now()).replace( + hour=0, minute=0, second=0, microsecond=0 + ) + end_forecast = (start_forecast + pd.Timedelta(days=delta_forecast)).replace( + microsecond=0 + ) + forecast_dates = ( + pd.date_range( + start=start_forecast, + end=end_forecast + timedelta(days=timedelta_days) - freq, + freq=freq, + tz=time_zone, + ) + .tz_convert("utc") + .round(freq, ambiguous="infer", nonexistent="shift_forward") + .tz_convert(time_zone) + ) return forecast_dates -def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict, optim_conf: dict, - plant_conf: dict, set_type: str, logger: logging.Logger - ) -> Tuple[str, dict]: +def treat_runtimeparams( + runtimeparams: str, + params: str, + retrieve_hass_conf: dict, + optim_conf: dict, + plant_conf: dict, + set_type: str, + logger: logging.Logger, + emhass_conf: dict, +) -> Tuple[str, dict]: """ Treat the passed optimization runtime parameters. @@ -128,31 +155,38 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic :type runtimeparams: str :param params: Built configuration parameters :type params: str - :param retrieve_hass_conf: Container for data retrieving parameters. + :param retrieve_hass_conf: Config dictionary for data retrieving parameters. :type retrieve_hass_conf: dict - :param optim_conf: Container for optimization parameters. + :param optim_conf: Config dictionary for optimization parameters. :type optim_conf: dict - :param plant_conf: Container for technical plant parameters. + :param plant_conf: Config dictionary for technical plant parameters. :type plant_conf: dict :param set_type: The type of action to be performed. :type set_type: str :param logger: The logger object. :type logger: logging.Logger + :param emhass_conf: Dictionary containing the needed emhass paths + :type emhass_conf: dict :return: Returning the params and optimization parameter container. 
:rtype: Tuple[str, dict] """ - # check if passed params is a dict + # Check if passed params is a dict if (params != None) and (params != "null"): if type(params) is str: params = json.loads(params) else: params = {} + # Merge current config categories to params + params["retrieve_hass_conf"].update(retrieve_hass_conf) + params["optim_conf"].update(optim_conf) + params["plant_conf"].update(plant_conf) + # Some default data needed custom_deferrable_forecast_id = [] custom_predicted_temperature_id = [] - for k in range(optim_conf['number_of_deferrable_loads']): + for k in range(params["optim_conf"]["number_of_deferrable_loads"]): custom_deferrable_forecast_id.append( { "entity_id": "sensor.p_deferrable{}".format(k), @@ -233,18 +267,132 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic else: params["passed_data"] = default_passed_dict + # If any runtime parameters where passed in action call if runtimeparams is not None: if type(runtimeparams) is str: runtimeparams = json.loads(runtimeparams) - # Format required date/time parameters - optimization_time_step = int( - retrieve_hass_conf['optimization_time_step'].seconds / 60.0) - delta_forecast = int(optim_conf['delta_forecast_daily'].days) - time_zone = retrieve_hass_conf["time_zone"] + + # Loop though parameters stored in association file, Check to see if any stored in runtime + # If true, set runtime parameter to params + if emhass_conf["associations_path"].exists(): + with emhass_conf["associations_path"].open("r") as data: + associations = list(csv.reader(data, delimiter=",")) + # Association file key reference + # association[0] = config categories + # association[1] = legacy parameter name + # association[2] = parameter (config.json/config_defaults.json) + # association[3] = parameter list name if exists (not used, from legacy options.json) + for association in associations: + # Check parameter name exists in runtime + if runtimeparams.get(association[2], None) is not None: + params[association[0]][association[2]] = runtimeparams[ + association[2] + ] + # Check Legacy parameter name runtime + elif runtimeparams.get(association[1], None) is not None: + params[association[0]][association[2]] = runtimeparams[ + association[1] + ] + else: + logger.warning( + "Cant find associations file (associations.csv) in: " + + str(emhass_conf["associations_path"]) + ) + + # Generate forecast_dates + if ( + "optimization_time_step" in runtimeparams.keys() + or "freq" in runtimeparams.keys() + ): + optimization_time_step = int( + runtimeparams.get("optimization_time_step", runtimeparams.get("freq")) + ) + params["retrieve_hass_conf"]["optimization_time_step"] = pd.to_timedelta( + optimization_time_step + ) + else: + optimization_time_step = int( + params["retrieve_hass_conf"]["optimization_time_step"].seconds / 60.0 + ) + if ( + runtimeparams.get("delta_forecast_daily", None) is not None + or runtimeparams.get("delta_forecast", None) is not None + ): + delta_forecast = int( + runtimeparams.get( + "delta_forecast_daily", runtimeparams["delta_forecast"] + ) + ) + params["optim_conf"]["delta_forecast_daily"] = pd.Timedelta( + days=optim_conf["delta_forecast_daily"] + ) + else: + delta_forecast = int(params["optim_conf"]["delta_forecast_daily"].days) + if runtimeparams.get("time_zone", None) is not None: + time_zone = pytz.timezone(params["retrieve_hass_conf"]["time_zone"]) + params["retrieve_hass_conf"]["time_zone"] = time_zone + else: + time_zone = params["retrieve_hass_conf"]["time_zone"] + forecast_dates = 
get_forecast_dates( - optimization_time_step, delta_forecast, time_zone) - - # regressor-model-fit + optimization_time_step, delta_forecast, time_zone + ) + + # Treat passed forecast data lists + list_forecast_key = [ + "pv_power_forecast", + "load_power_forecast", + "load_cost_forecast", + "prod_price_forecast", + "outdoor_temperature_forecast", + ] + forecast_methods = [ + "weather_forecast_method", + "load_forecast_method", + "load_cost_forecast_method", + "production_price_forecast_method", + "outdoor_temperature_forecast_method", + ] + + # Loop forecasts, check if value is a list and greater than or equal to forecast_dates + for method, forecast_key in enumerate(list_forecast_key): + if forecast_key in runtimeparams.keys(): + if isinstance(runtimeparams[forecast_key], list) and len( + runtimeparams[forecast_key] + ) >= len(forecast_dates): + params["passed_data"][forecast_key] = runtimeparams[forecast_key] + params["optim_conf"][forecast_methods[method]] = "list" + else: + logger.error( + f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}" + ) + logger.error( + f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}" + ) + # Check if string contains list, if so extract + if isinstance(runtimeparams[forecast_key], str): + if isinstance(ast.literal_eval(runtimeparams[forecast_key]), list): + runtimeparams[forecast_key] = ast.literal_eval( + runtimeparams[forecast_key] + ) + list_non_digits = [ + x + for x in runtimeparams[forecast_key] + if not (isinstance(x, int) or isinstance(x, float)) + ] + if len(list_non_digits) > 0: + logger.warning( + f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)" + ) + for x in list_non_digits: + logger.warning( + f"This value in {forecast_key} was detected as non digits: {str(x)}" + ) + else: + params["passed_data"][forecast_key] = None + + # Add runtime exclusive (not in config) parameters to params + # regressor-model-fit if set_type == "regressor-model-fit": if "csv_file" in runtimeparams: csv_file = runtimeparams["csv_file"] @@ -265,7 +413,7 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic else: date_features = runtimeparams["date_features"] params["passed_data"]["date_features"] = date_features - + # regressor-model-predict if set_type == "regressor-model-predict": if "new_values" in runtimeparams: @@ -280,101 +428,80 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic if "target" in runtimeparams: target = runtimeparams["target"] params["passed_data"]["target"] = target - - # Treating special data passed for MPC control case + + # MPC control case if set_type == "naive-mpc-optim": if "prediction_horizon" not in runtimeparams.keys(): prediction_horizon = 10 # 10 time steps by default else: prediction_horizon = runtimeparams["prediction_horizon"] params["passed_data"]["prediction_horizon"] = prediction_horizon - if 'soc_init' not in runtimeparams.keys(): - soc_init = plant_conf['battery_target_state_of_charge'] + if "soc_init" not in runtimeparams.keys(): + soc_init = params["plant_conf"]["battery_target_state_of_charge"] else: soc_init = runtimeparams["soc_init"] params["passed_data"]["soc_init"] = soc_init if "soc_final" not in runtimeparams.keys(): - soc_final = plant_conf['battery_target_state_of_charge'] + soc_final = params["plant_conf"]["battery_target_state_of_charge"] else: soc_final = 
runtimeparams["soc_final"] params["passed_data"]["soc_final"] = soc_final - if 'operating_hours_of_each_deferrable_load' not in runtimeparams.keys() and 'def_total_hours' not in runtimeparams.keys(): - def_total_hours = optim_conf.get('operating_hours_of_each_deferrable_load') - else: - def_total_hours = runtimeparams.get( - 'operating_hours_of_each_deferrable_load', runtimeparams.get('def_total_hours')) - params["passed_data"]['operating_hours_of_each_deferrable_load'] = def_total_hours - if 'start_timesteps_of_each_deferrable_load' not in runtimeparams.keys() and 'def_start_timestep' in runtimeparams.keys(): - def_start_timestep = optim_conf.get('start_timesteps_of_each_deferrable_load') - else: - def_start_timestep = runtimeparams.get( - 'start_timesteps_of_each_deferrable_load', runtimeparams.get('def_start_timestep')) - params["passed_data"]['start_timesteps_of_each_deferrable_load'] = def_start_timestep - if 'end_timesteps_of_each_deferrable_load' not in runtimeparams.keys() and 'def_end_timestep' not in runtimeparams.keys(): - def_end_timestep = optim_conf.get('end_timesteps_of_each_deferrable_load') - else: - def_end_timestep = runtimeparams.get( - 'end_timesteps_of_each_deferrable_load', runtimeparams.get('def_end_timestep')) - params["passed_data"]['end_timesteps_of_each_deferrable_load'] = def_end_timestep + forecast_dates = copy.deepcopy(forecast_dates)[0:prediction_horizon] + # Load the default config - if "def_load_config" in optim_conf: - for k in range(len(optim_conf["def_load_config"])): - if "thermal_config" in optim_conf["def_load_config"][k]: - if ("heater_desired_temperatures" in runtimeparams and len(runtimeparams["heater_desired_temperatures"]) > k): - optim_conf["def_load_config"][k]["thermal_config"]["desired_temperatures"] = runtimeparams["heater_desired_temperatures"][k] - if ("heater_start_temperatures" in runtimeparams and len(runtimeparams["heater_start_temperatures"]) > k): - optim_conf["def_load_config"][k]["thermal_config"]["start_temperature"] = runtimeparams["heater_start_temperatures"][k] + if "def_load_config" in runtimeparams: + params["optim_conf"]["def_load_config"] = runtimeparams[ + "def_load_config" + ] + if "def_load_config" in params["optim_conf"]: + for k in range(len(params["optim_conf"]["def_load_config"])): + if "thermal_config" in params["optim_conf"]["def_load_config"][k]: + if ( + "heater_desired_temperatures" in runtimeparams + and len(runtimeparams["heater_desired_temperatures"]) > k + ): + params["optim_conf"]["def_load_config"][k][ + "thermal_config" + ]["desired_temperatures"] = runtimeparams[ + "heater_desired_temperatures" + ][k] + if ( + "heater_start_temperatures" in runtimeparams + and len(runtimeparams["heater_start_temperatures"]) > k + ): + params["optim_conf"]["def_load_config"][k][ + "thermal_config" + ]["start_temperature"] = runtimeparams[ + "heater_start_temperatures" + ][k] else: params["passed_data"]["prediction_horizon"] = None params["passed_data"]["soc_init"] = None params["passed_data"]["soc_final"] = None - params["passed_data"]['operating_hours_of_each_deferrable_load'] = None - params["passed_data"]['start_timesteps_of_each_deferrable_load'] = None - params["passed_data"]['end_timesteps_of_each_deferrable_load'] = None - # Treat passed forecast data lists - list_forecast_key = ['pv_power_forecast', 'load_power_forecast', - 'load_cost_forecast', 'prod_price_forecast', 'outdoor_temperature_forecast'] - forecast_methods = ['weather_forecast_method', 'load_forecast_method', 'load_cost_forecast_method', - 
'production_price_forecast_method', 'outdoor_temperature_forecast_method'] - - # Loop forecasts, check if value is a list and greater than or equal to forecast_dates - for method, forecast_key in enumerate(list_forecast_key): - if forecast_key in runtimeparams.keys(): - if type(runtimeparams[forecast_key]) == list and len(runtimeparams[forecast_key]) >= len(forecast_dates): - params['passed_data'][forecast_key] = runtimeparams[forecast_key] - optim_conf[forecast_methods[method]] = 'list' - else: - logger.error( - f"ERROR: The passed data is either not a list or the length is not correct, length should be {str(len(forecast_dates))}") - logger.error( - f"Passed type is {str(type(runtimeparams[forecast_key]))} and length is {str(len(runtimeparams[forecast_key]))}") - # Check if string contains list, if so extract - if type(runtimeparams[forecast_key]) == str: - if type(ast.literal_eval(runtimeparams[forecast_key])) == list: - runtimeparams[forecast_key] = ast.literal_eval(runtimeparams[forecast_key]) - list_non_digits = [x for x in runtimeparams[forecast_key] if not ( - isinstance(x, int) or isinstance(x, float))] - if len(list_non_digits) > 0: - logger.warning( - f"There are non numeric values on the passed data for {forecast_key}, check for missing values (nans, null, etc)") - for x in list_non_digits: - logger.warning( - f"This value in {forecast_key} was detected as non digits: {str(x)}") - else: - params['passed_data'][forecast_key] = None - + # Treat passed data for forecast model fit/predict/tune at runtime - if 'historic_days_to_retrieve' not in runtimeparams.keys() and 'days_to_retrieve' not in runtimeparams.keys(): - historic_days_to_retrieve = retrieve_hass_conf.get('historic_days_to_retrieve') + if ( + params["passed_data"].get("historic_days_to_retrieve", None) is not None + and params["passed_data"]["historic_days_to_retrieve"] < 9 + ): + logger.warning( + "warning `days_to_retrieve` is set to a value less than 9, this could cause an error with the fit" + ) + logger.warning( + "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune" + ) + params["passed_data"]["historic_days_to_retrieve"] = 9 else: - historic_days_to_retrieve = runtimeparams.get( - 'historic_days_to_retrieve', runtimeparams.get('days_to_retrieve')) - if historic_days_to_retrieve < 9: - logger.warning("`days_to_retrieve` is set to a value less than 9, this could cause an error with the fit") - logger.warning("setting fit/predict/tune `days_to_retrieve` to 9") - historic_days_to_retrieve = 9 - params["passed_data"]['historic_days_to_retrieve'] = historic_days_to_retrieve + if params["retrieve_hass_conf"].get("historic_days_to_retrieve", 0) < 9: + logger.debug( + "setting`passed_data:days_to_retrieve` to 9 for fit/predict/tune" + ) + params["passed_data"]["historic_days_to_retrieve"] = 9 + else: + params["passed_data"]["historic_days_to_retrieve"] = params[ + "retrieve_hass_conf" + ]["historic_days_to_retrieve"] if "model_type" not in runtimeparams.keys(): model_type = "load_forecast" else: @@ -409,13 +536,15 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic perform_backtest = False else: perform_backtest = ast.literal_eval( - str(runtimeparams["perform_backtest"]).capitalize()) + str(runtimeparams["perform_backtest"]).capitalize() + ) params["passed_data"]["perform_backtest"] = perform_backtest if "model_predict_publish" not in runtimeparams.keys(): model_predict_publish = False else: model_predict_publish = ast.literal_eval( - 
str(runtimeparams["model_predict_publish"]).capitalize()) + str(runtimeparams["model_predict_publish"]).capitalize() + ) params["passed_data"]["model_predict_publish"] = model_predict_publish if "model_predict_entity_id" not in runtimeparams.keys(): model_predict_entity_id = "sensor.p_load_forecast_custom_model" @@ -425,13 +554,19 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic if "model_predict_unit_of_measurement" not in runtimeparams.keys(): model_predict_unit_of_measurement = "W" else: - model_predict_unit_of_measurement = runtimeparams["model_predict_unit_of_measurement"] - params["passed_data"]["model_predict_unit_of_measurement"] = model_predict_unit_of_measurement + model_predict_unit_of_measurement = runtimeparams[ + "model_predict_unit_of_measurement" + ] + params["passed_data"]["model_predict_unit_of_measurement"] = ( + model_predict_unit_of_measurement + ) if "model_predict_friendly_name" not in runtimeparams.keys(): model_predict_friendly_name = "Load Power Forecast custom ML model" else: model_predict_friendly_name = runtimeparams["model_predict_friendly_name"] - params["passed_data"]["model_predict_friendly_name"] = model_predict_friendly_name + params["passed_data"]["model_predict_friendly_name"] = ( + model_predict_friendly_name + ) if "mlr_predict_entity_id" not in runtimeparams.keys(): mlr_predict_entity_id = "sensor.mlr_predict" else: @@ -440,14 +575,18 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic if "mlr_predict_unit_of_measurement" not in runtimeparams.keys(): mlr_predict_unit_of_measurement = None else: - mlr_predict_unit_of_measurement = runtimeparams["mlr_predict_unit_of_measurement"] - params["passed_data"]["mlr_predict_unit_of_measurement"] = mlr_predict_unit_of_measurement + mlr_predict_unit_of_measurement = runtimeparams[ + "mlr_predict_unit_of_measurement" + ] + params["passed_data"]["mlr_predict_unit_of_measurement"] = ( + mlr_predict_unit_of_measurement + ) if "mlr_predict_friendly_name" not in runtimeparams.keys(): mlr_predict_friendly_name = "mlr predictor" else: mlr_predict_friendly_name = runtimeparams["mlr_predict_friendly_name"] params["passed_data"]["mlr_predict_friendly_name"] = mlr_predict_friendly_name - + # Treat passed data for other parameters if "alpha" not in runtimeparams.keys(): alpha = 0.5 @@ -459,24 +598,30 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic else: beta = runtimeparams["beta"] params["passed_data"]["beta"] = beta + # Param to save forecast cache (i.e. Solcast) if "weather_forecast_cache" not in runtimeparams.keys(): weather_forecast_cache = False else: weather_forecast_cache = runtimeparams["weather_forecast_cache"] params["passed_data"]["weather_forecast_cache"] = weather_forecast_cache + # Param to make sure optimization only uses cached data. 
(else produce error) if "weather_forecast_cache_only" not in runtimeparams.keys(): weather_forecast_cache_only = False else: weather_forecast_cache_only = runtimeparams["weather_forecast_cache_only"] - params["passed_data"]["weather_forecast_cache_only"] = weather_forecast_cache_only + params["passed_data"]["weather_forecast_cache_only"] = ( + weather_forecast_cache_only + ) + # A condition to manually save entity data under data_path/entities after optimization if "entity_save" not in runtimeparams.keys(): entity_save = "" else: entity_save = runtimeparams["entity_save"] params["passed_data"]["entity_save"] = entity_save + # A condition to put a prefix on all published data, or check for saved data under prefix name if "publish_prefix" not in runtimeparams.keys(): publish_prefix = "" @@ -485,83 +630,25 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic params["passed_data"]["publish_prefix"] = publish_prefix # Treat optimization (optim_conf) configuration parameters passed at runtime - if 'number_of_deferrable_loads' in runtimeparams.keys() or 'num_def_loads' in runtimeparams.keys(): - optim_conf['number_of_deferrable_loads'] = runtimeparams.get( - 'number_of_deferrable_loads', runtimeparams.get('num_def_loads')) - if 'nominal_power_of_deferrable_loads' in runtimeparams.keys() or 'P_deferrable_nom' in runtimeparams.keys(): - optim_conf['nominal_power_of_deferrable_loads'] = runtimeparams.get( - 'nominal_power_of_deferrable_loads', runtimeparams.get('P_deferrable_nom')) - if 'operating_hours_of_each_deferrable_load' in runtimeparams.keys() or 'def_total_hours' in runtimeparams.keys(): - optim_conf['operating_hours_of_each_deferrable_load'] = runtimeparams.get( - 'operating_hours_of_each_deferrable_load', runtimeparams.get('def_total_hours')) - if 'start_timesteps_of_each_deferrable_load' in runtimeparams.keys() or 'def_start_timestep' in runtimeparams.keys(): - optim_conf['start_timesteps_of_each_deferrable_load'] = runtimeparams.get( - 'start_timesteps_of_each_deferrable_load', runtimeparams.get('def_start_timestep')) - if 'end_timesteps_of_each_deferrable_load' in runtimeparams.keys() or 'def_end_timestep' in runtimeparams.keys(): - optim_conf['end_timesteps_of_each_deferrable_load'] = runtimeparams.get( - 'end_timesteps_of_each_deferrable_load', runtimeparams.get('def_end_timestep')) if "def_current_state" in runtimeparams.keys(): - optim_conf["def_current_state"] = [ - bool(s) for s in runtimeparams["def_current_state"]] - if 'treat_deferrable_load_as_semi_cont' in runtimeparams.keys() or 'treat_def_as_semi_cont' in runtimeparams.keys(): - optim_conf['treat_deferrable_load_as_semi_cont'] = [ - ast.literal_eval(str(k).capitalize()) for k in runtimeparams.get('treat_deferrable_load_as_semi_cont',runtimeparams.get('treat_def_as_semi_cont')) - ] - if 'set_deferrable_load_single_constant' in runtimeparams.keys() or 'set_def_constant' in runtimeparams.keys(): - optim_conf['set_deferrable_load_single_constant'] = [ - ast.literal_eval(str(k).capitalize()) for k in runtimeparams.get('set_deferrable_load_single_constant',runtimeparams.get('set_def_constant')) + params["optim_conf"]["def_current_state"] = [ + bool(s) for s in runtimeparams["def_current_state"] ] - if 'set_deferrable_startup_penalty' in runtimeparams.keys() or 'def_start_penalty' in runtimeparams.keys(): - optim_conf['set_deferrable_startup_penalty'] = [ - ast.literal_eval(str(k).capitalize()) for k in runtimeparams.get('set_deferrable_startup_penalty',runtimeparams.get('def_start_penalty')) - ] - 
if 'def_load_config' in runtimeparams: - optim_conf["def_load_config"] = runtimeparams['def_load_config'] - if 'weight_battery_discharge' in runtimeparams.keys(): - optim_conf['weight_battery_discharge'] = runtimeparams[ - 'weight_battery_discharge' - ] - if 'weight_battery_charge' in runtimeparams.keys(): - optim_conf['weight_battery_charge'] = runtimeparams['weight_battery_charge'] # Treat retrieve data from Home Assistant (retrieve_hass_conf) configuration parameters passed at runtime - if 'optimization_time_step' in runtimeparams.keys() or 'freq' in runtimeparams.keys(): - retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(runtimeparams.get( - 'optimization_time_step', runtimeparams.get('freq')), "minutes") - if 'continual_publish' in runtimeparams.keys(): - retrieve_hass_conf['continual_publish'] = bool( - runtimeparams['continual_publish']) + # Secrets passed at runtime if "solcast_api_key" in runtimeparams.keys(): - retrieve_hass_conf["solcast_api_key"] = runtimeparams["solcast_api_key"] - optim_conf['weather_forecast_method'] = "solcast" + params["retrieve_hass_conf"]["solcast_api_key"] = runtimeparams[ + "solcast_api_key" + ] if "solcast_rooftop_id" in runtimeparams.keys(): - retrieve_hass_conf["solcast_rooftop_id"] = runtimeparams[ + params["retrieve_hass_conf"]["solcast_rooftop_id"] = runtimeparams[ "solcast_rooftop_id" ] - optim_conf['weather_forecast_method'] = "solcast" if "solar_forecast_kwp" in runtimeparams.keys(): - retrieve_hass_conf["solar_forecast_kwp"] = runtimeparams[ + params["retrieve_hass_conf"]["solar_forecast_kwp"] = runtimeparams[ "solar_forecast_kwp" ] - optim_conf['weather_forecast_method'] = "solar.forecast" - - # Treat system model parameters (plant) configuration parameters passed at runtime - if 'battery_minimum_state_of_charge' in runtimeparams.keys() or 'SOCmin' in runtimeparams.keys(): - plant_conf['battery_minimum_state_of_charge'] = runtimeparams.get( - 'battery_minimum_state_of_charge', runtimeparams.get('SOCmin')) - if 'battery_maximum_state_of_charge' in runtimeparams.keys() or 'SOCmax' in runtimeparams.keys(): - plant_conf['battery_maximum_state_of_charge'] = runtimeparams.get( - 'battery_maximum_state_of_charge', runtimeparams.get('SOCmax')) - if 'battery_target_state_of_charge' in runtimeparams.keys() or 'SOCtarget' in runtimeparams.keys(): - plant_conf['battery_target_state_of_charge'] = runtimeparams.get( - 'battery_target_state_of_charge', runtimeparams.get('SOCtarget')) - if 'battery_discharge_power_max' in runtimeparams.keys() or 'Pd_max' in runtimeparams.keys(): - plant_conf['battery_discharge_power_max'] = runtimeparams.get( - 'battery_discharge_power_max', runtimeparams.get('Pd_max')) - if 'battery_charge_power_max' in runtimeparams.keys() or 'Pc_max' in runtimeparams.keys(): - plant_conf['battery_charge_power_max'] = runtimeparams.get( - 'battery_charge_power_max', runtimeparams.get('Pc_max')) - # Treat custom entities id's and friendly names for variables if "custom_pv_forecast_id" in runtimeparams.keys(): params["passed_data"]["custom_pv_forecast_id"] = runtimeparams[ @@ -615,7 +702,12 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic params["passed_data"]["custom_predicted_temperature_id"] = runtimeparams[ "custom_predicted_temperature_id" ] - + + # split config categories from params + retrieve_hass_conf = params["retrieve_hass_conf"] + optim_conf = params["optim_conf"] + plant_conf = params["plant_conf"] + # Serialize the final params params = json.dumps(params, default=str) return 
params, retrieve_hass_conf, optim_conf, plant_conf @@ -623,8 +715,8 @@ def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dic def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dict]: """ - Perform parsing of the params into the configuration catagories - + Perform parsing of the params into the configuration catagories + :param params: Built configuration parameters :type params: str :param logger: The logger object @@ -648,15 +740,20 @@ def get_yaml_parse(params: str, logger: logging.Logger) -> Tuple[dict, dict, dic plant_conf = input_conf.get("plant_conf", {}) # Format time parameters - if optim_conf.get('delta_forecast_daily',None) is not None: - optim_conf['delta_forecast_daily'] = pd.Timedelta(days=optim_conf['delta_forecast_daily']) - if retrieve_hass_conf.get('optimization_time_step',None) is not None: - retrieve_hass_conf['optimization_time_step'] = pd.to_timedelta(retrieve_hass_conf['optimization_time_step'], "minutes") - if retrieve_hass_conf.get('time_zone',None) is not None: - retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"]) + if optim_conf.get("delta_forecast_daily", None) is not None: + optim_conf["delta_forecast_daily"] = pd.Timedelta( + days=optim_conf["delta_forecast_daily"] + ) + if retrieve_hass_conf.get("optimization_time_step", None) is not None: + retrieve_hass_conf["optimization_time_step"] = pd.to_timedelta( + retrieve_hass_conf["optimization_time_step"], "minutes" + ) + if retrieve_hass_conf.get("time_zone", None) is not None: + retrieve_hass_conf["time_zone"] = pytz.timezone(retrieve_hass_conf["time_zone"]) return retrieve_hass_conf, optim_conf, plant_conf + def get_injection_dict(df: pd.DataFrame, plot_size: Optional[int] = 1366) -> dict: """ Build a dictionary with graphs and tables for the webui. @@ -744,7 +841,9 @@ def get_injection_dict(df: pd.DataFrame, plot_size: Optional[int] = 1366) -> dic return injection_dict -def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLForecaster) -> dict: +def get_injection_dict_forecast_model_fit( + df_fit_pred: pd.DataFrame, mlf: MLForecaster +) -> dict: """ Build a dictionary with graphs and tables for the webui for special MLF fit case. @@ -773,7 +872,9 @@ def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLFore return injection_dict -def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLForecaster) -> dict: +def get_injection_dict_forecast_model_tune( + df_pred_optim: pd.DataFrame, mlf: MLForecaster +) -> dict: """ Build a dictionary with graphs and tables for the webui for special MLF tune case. @@ -803,10 +904,16 @@ def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLF injection_dict["figure_0"] = image_path_0 return injection_dict -def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str, config_path: Optional[str] = None, - legacy_config_path: Optional[str] = None) -> dict: + +def build_config( + emhass_conf: dict, + logger: logging.Logger, + defaults_path: str, + config_path: Optional[str] = None, + legacy_config_path: Optional[str] = None, +) -> dict: """ - Retrieve parameters from configuration files. + Retrieve parameters from configuration files. 
priority order (low - high) = defaults_path, config_path legacy_config_path :param emhass_conf: Dictionary containing the needed emhass paths @@ -825,39 +932,48 @@ def build_config(emhass_conf: dict, logger: logging.Logger, defaults_path: str, # Read default parameters (default root_path/data/config_defaults.json) if defaults_path and pathlib.Path(defaults_path).is_file(): - with defaults_path.open('r') as data: + with defaults_path.open("r") as data: config = json.load(data) else: logger.error("config_defaults.json. does not exist ") return False - + # Read user config parameters if provided (default /share/config.json) if config_path and pathlib.Path(config_path).is_file(): - with config_path.open('r') as data: + with config_path.open("r") as data: # Set override default parameters (config_defaults) with user given parameters (config.json) logger.info("Obtaining parameters from config.json:") config.update(json.load(data)) else: - logger.info("config.json does not exist, or has not been passed. config parameters may default to config_defaults.json") - logger.info("you may like to generate the config.json file on the configuration page") + logger.info( + "config.json does not exist, or has not been passed. config parameters may default to config_defaults.json" + ) + logger.info( + "you may like to generate the config.json file on the configuration page" + ) # Check to see if legacy config_emhass.yaml was provided (default /app/config_emhass.yaml) # Convert legacy parameter definitions/format to match config.json if legacy_config_path and pathlib.Path(legacy_config_path).is_file(): - with open(legacy_config_path, 'r') as data: + with open(legacy_config_path, "r") as data: legacy_config = yaml.load(data, Loader=yaml.FullLoader) - legacy_config_parameters = build_legacy_config_params(emhass_conf,legacy_config,logger) + legacy_config_parameters = build_legacy_config_params( + emhass_conf, legacy_config, logger + ) if type(legacy_config_parameters) is not bool: - logger.info("Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)") - config.update(legacy_config_parameters) + logger.info( + "Obtaining parameters from config_emhass.yaml: (will overwrite config parameters)" + ) + config.update(legacy_config_parameters) return config -def build_legacy_config_params(emhass_conf: dict, legacy_config: dict, - logger: logging.Logger) -> dict: +def build_legacy_config_params( + emhass_conf: dict, legacy_config: dict, logger: logging.Logger +) -> dict: """ - Build a config dictionary with legacy config_emhass.yaml file. + Build a config dictionary with legacy config_emhass.yaml file. Uses the associations file to convert parameter naming conventions (to config.json/config_defaults.json). Extracts the parameter values and formats to match config.json. 
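That priority order (defaults overridden by config.json, then by a converted legacy config_emhass.yaml) amounts to successive dictionary merges. A simplified standalone sketch, with illustrative paths; the real build_config additionally remaps legacy parameter names through associations.csv via build_legacy_config_params before merging:

    import json
    import pathlib

    import yaml

    def sketch_build_config(defaults_path, config_path=None, legacy_config_path=None):
        # Lowest priority: packaged defaults (config_defaults.json)
        config = json.loads(pathlib.Path(defaults_path).read_text())
        # Next: user config.json, overriding defaults when present
        if config_path and pathlib.Path(config_path).is_file():
            config.update(json.loads(pathlib.Path(config_path).read_text()))
        # Highest: legacy config_emhass.yaml (parameter names converted first in the real code)
        if legacy_config_path and pathlib.Path(legacy_config_path).is_file():
            with open(legacy_config_path, "r") as data:
                config.update(yaml.load(data, Loader=yaml.FullLoader))
        return config
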
@@ -871,76 +987,104 @@ def build_legacy_config_params(emhass_conf: dict, legacy_config: dict, :rtype: dict """ - # Association file key reference # association[0] = config catagories # association[1] = legacy parameter name # association[2] = parameter (config.json/config_defaults.json) - # association[3] = parameter list name if exists (not used, from legacy options.json) + # association[3] = parameter list name if exists (not used, from legacy options.json) # Check each config catagories exists, else create blank dict for categories (avoid errors) - legacy_config['retrieve_hass_conf'] = legacy_config.get('retrieve_hass_conf',{}) - legacy_config['optim_conf'] = legacy_config.get('optim_conf',{}) - legacy_config['plant_conf'] = legacy_config.get('plant_conf',{}) + legacy_config["retrieve_hass_conf"] = legacy_config.get("retrieve_hass_conf", {}) + legacy_config["optim_conf"] = legacy_config.get("optim_conf", {}) + legacy_config["plant_conf"] = legacy_config.get("plant_conf", {}) config = {} # Use associations list to map legacy parameter name with config.json parameter name - if emhass_conf['associations_path'].exists(): - with emhass_conf['associations_path'].open('r') as data: - associations = list(csv.reader(data, delimiter=",")) + if emhass_conf["associations_path"].exists(): + with emhass_conf["associations_path"].open("r") as data: + associations = list(csv.reader(data, delimiter=",")) else: - logger.error("Cant find associations file (associations.csv) in: " + str(emhass_conf['associations_path'])) + logger.error( + "Cant find associations file (associations.csv) in: " + + str(emhass_conf["associations_path"]) + ) return False - + # Loop through all parameters in association file # Append config with existing legacy config parameters (converting alternative parameter naming conventions with associations list) for association in associations: # if legacy config catagories exists and if legacy parameter exists in config catagories - if legacy_config.get(association[0],None) is not None and legacy_config[association[0]].get(association[1],None) is not None: + if ( + legacy_config.get(association[0], None) is not None + and legacy_config[association[0]].get(association[1], None) is not None + ): config[association[2]] = legacy_config[association[0]][association[1]] - + # If config now has load_peak_hour_periods, extract from list of dict - if association[2] == "load_peak_hour_periods" and type(config[association[2]]) is list: - config[association[2]] = dict((key, d[key]) for d in config[association[2]] for key in d) - + if ( + association[2] == "load_peak_hour_periods" + and type(config[association[2]]) is list + ): + config[association[2]] = dict( + (key, d[key]) for d in config[association[2]] for key in d + ) + return config # params['associations_dict'] = associations_dict + def param_to_config(param: dict, logger: logging.Logger) -> dict: """ A function that extracts the parameters from param back to the config.json format. Extracts parameters from config catagories. Attempts to exclude secrets hosed in retrieve_hass_conf. 
- + :param params: Built configuration parameters :type param: dict :param logger: The logger object :type logger: logging.Logger :return: The built config dictionary :rtype: dict - """ + """ logger.debug("Converting param to config") return_config = {} - config_catagories = ["retrieve_hass_conf","optim_conf","plant_conf"] - secret_params = ["hass_url", "time_zone", "Latitude", "Longitude", "Altitude", "long_lived_token", "solcast_api_key", "solcast_rooftop_id", "solar_forecast_kwp"] - + config_catagories = ["retrieve_hass_conf", "optim_conf", "plant_conf"] + secret_params = [ + "hass_url", + "time_zone", + "Latitude", + "Longitude", + "Altitude", + "long_lived_token", + "solcast_api_key", + "solcast_rooftop_id", + "solar_forecast_kwp", + ] + # Loop through config catagories that contain config params, and extract for config in config_catagories: for parameter in param[config]: - # If parameter is not a secret, append to return_config - if parameter not in secret_params: - return_config[str(parameter)] = param[config][parameter] - + # If parameter is not a secret, append to return_config + if parameter not in secret_params: + return_config[str(parameter)] = param[config][parameter] + return return_config -def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[dict] = {}, options_path: Optional[str] = None, - secrets_path: Optional[str] = None, no_response: Optional[bool] = False) -> Tuple[dict, dict]: + +def build_secrets( + emhass_conf: dict, + logger: logging.Logger, + argument: Optional[dict] = {}, + options_path: Optional[str] = None, + secrets_path: Optional[str] = None, + no_response: Optional[bool] = False, +) -> Tuple[dict, dict]: """ Retrieve and build parameters from secrets locations (ENV, ARG, Secrets file (secrets_emhass.yaml/options.json) and/or Home Assistant (via API)) priority order (lwo to high) = Defaults (written in function), ENV, Options json file, Home Assistant API, Secrets yaml file, Arguments - + :param emhass_conf: Dictionary containing the needed emhass paths :type emhass_conf: dict :param logger: The logger object @@ -957,7 +1101,7 @@ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[ :rtype: Tuple[dict, dict]: """ - #Set defaults to be overwritten + # Set defaults to be overwritten params_secrets = { "hass_url": "https://myhass.duckdns.org/", "long_lived_token": "thatverylongtokenhere", @@ -967,128 +1111,172 @@ def build_secrets(emhass_conf: dict, logger: logging.Logger, argument: Optional[ "Altitude": 4807.8, "solcast_api_key": "yoursecretsolcastapikey", "solcast_rooftop_id": "yourrooftopid", - "solar_forecast_kwp": 5 + "solar_forecast_kwp": 5, } # Obtain Secrets from ENV? 
- params_secrets['hass_url'] = os.getenv("EMHASS_URL",params_secrets['hass_url']) - params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN", params_secrets['long_lived_token']) - params_secrets['time_zone'] = os.getenv("TIME_ZONE", params_secrets['time_zone']) - params_secrets['Latitude'] = float(os.getenv("LAT", params_secrets['Latitude'])) - params_secrets['Longitude'] = float(os.getenv("LON", params_secrets['Longitude'])) - params_secrets['Altitude'] = float(os.getenv("ALT", params_secrets['Altitude'])) + params_secrets["hass_url"] = os.getenv("EMHASS_URL", params_secrets["hass_url"]) + params_secrets["long_lived_token"] = os.getenv( + "SUPERVISOR_TOKEN", params_secrets["long_lived_token"] + ) + params_secrets["time_zone"] = os.getenv("TIME_ZONE", params_secrets["time_zone"]) + params_secrets["Latitude"] = float(os.getenv("LAT", params_secrets["Latitude"])) + params_secrets["Longitude"] = float(os.getenv("LON", params_secrets["Longitude"])) + params_secrets["Altitude"] = float(os.getenv("ALT", params_secrets["Altitude"])) # Obtain secrets from options.json (Generated from EMHASS-Add-on, Home Assistant addon Configuration page) or Home Assistant API (from local Supervisor API)? # Use local supervisor API to obtain secrets from Home Assistant if hass_url in options.json is empty and SUPERVISOR_TOKEN ENV exists (provided by Home Assistant when running the container as addon) options = {} if options_path and pathlib.Path(options_path).is_file(): - with options_path.open('r') as data: + with options_path.open("r") as data: options = json.load(data) - + # Obtain secrets from Home Assistant? - url_from_options = options.get('hass_url', 'empty') - key_from_options = options.get('long_lived_token', 'empty') + url_from_options = options.get("hass_url", "empty") + key_from_options = options.get("long_lived_token", "empty") # If data path specified by options.json, overwrite emhass_conf['data_path'] - if options.get('data_path', None) != None and pathlib.Path(options['data_path']).exists(): - emhass_conf['data_path'] = pathlib.Path(options['data_path']); - + if ( + options.get("data_path", None) != None + and pathlib.Path(options["data_path"]).exists() + ): + emhass_conf["data_path"] = pathlib.Path(options["data_path"]) + # Check to use Home Assistant local API - if not no_response and \ - (url_from_options == 'empty' or url_from_options == '' or url_from_options == "http://supervisor/core/api") and \ - os.getenv("SUPERVISOR_TOKEN", None) is not None: - - params_secrets['long_lived_token'] = os.getenv("SUPERVISOR_TOKEN",None) - params_secrets['hass_url'] = "http://supervisor/core/api" + if ( + not no_response + and ( + url_from_options == "empty" + or url_from_options == "" + or url_from_options == "http://supervisor/core/api" + ) + and os.getenv("SUPERVISOR_TOKEN", None) is not None + ): + params_secrets["long_lived_token"] = os.getenv("SUPERVISOR_TOKEN", None) + params_secrets["hass_url"] = "http://supervisor/core/api" headers = { - "Authorization": "Bearer " + params_secrets['long_lived_token'], - "content-type": "application/json" + "Authorization": "Bearer " + params_secrets["long_lived_token"], + "content-type": "application/json", } # Obtain secrets from Home Assistant via API logger.debug("Obtaining secrets from Home Assistant Supervisor API") - response = get((params_secrets['hass_url'] + "/config"), headers=headers) + response = get( + (params_secrets["hass_url"] + "/config"), headers=headers + ) if response.status_code < 400: config_hass = response.json() params_secrets = { - 
'hass_url': params_secrets['hass_url'], - 'long_lived_token': params_secrets['long_lived_token'], - 'time_zone': config_hass['time_zone'], - 'Latitude': config_hass['latitude'], - 'Longitude': config_hass['longitude'], - 'Altitude': config_hass['elevation'] + "hass_url": params_secrets["hass_url"], + "long_lived_token": params_secrets["long_lived_token"], + "time_zone": config_hass["time_zone"], + "Latitude": config_hass["latitude"], + "Longitude": config_hass["longitude"], + "Altitude": config_hass["elevation"], } - else: + else: # Obtain the url and key secrets if any from options.json (default /app/options.json) - logger.warning("Error obtaining secrets from Home Assistant Supervisor API") + logger.warning( + "Error obtaining secrets from Home Assistant Supervisor API" + ) logger.debug("Obtaining url and key secrets from options.json") - if url_from_options != 'empty' and url_from_options != '': - params_secrets['hass_url'] = url_from_options - if key_from_options != 'empty' and key_from_options != '': - params_secrets['long_lived_token'] = key_from_options - if options.get('time_zone',"empty") != "empty" and options['time_zone'] != '': - params_secrets['time_zone'] = options['time_zone'] - if options.get('Latitude',None) is not None and bool(options['Latitude']): - params_secrets['Latitude'] = options['Latitude'] - if options.get('Longitude',None) is not None and bool(options['Longitude']): - params_secrets['Longitude'] = options['Longitude'] - if options.get('Altitude',None) is not None and bool(options['Altitude']): - params_secrets['Altitude'] = options['Altitude'] + if url_from_options != "empty" and url_from_options != "": + params_secrets["hass_url"] = url_from_options + if key_from_options != "empty" and key_from_options != "": + params_secrets["long_lived_token"] = key_from_options + if ( + options.get("time_zone", "empty") != "empty" + and options["time_zone"] != "" + ): + params_secrets["time_zone"] = options["time_zone"] + if options.get("Latitude", None) is not None and bool( + options["Latitude"] + ): + params_secrets["Latitude"] = options["Latitude"] + if options.get("Longitude", None) is not None and bool( + options["Longitude"] + ): + params_secrets["Longitude"] = options["Longitude"] + if options.get("Altitude", None) is not None and bool( + options["Altitude"] + ): + params_secrets["Altitude"] = options["Altitude"] else: # Obtain the url and key secrets if any from options.json (default /app/options.json) logger.debug("Obtaining url and key secrets from options.json") - if url_from_options != 'empty' and url_from_options != '': - params_secrets['hass_url'] = url_from_options - if key_from_options != 'empty' and key_from_options != '': - params_secrets['long_lived_token'] = key_from_options - if options.get('time_zone',"empty") != "empty" and options['time_zone'] != '': - params_secrets['time_zone'] = options['time_zone'] - if options.get('Latitude',None) is not None and bool(options['Latitude']): - params_secrets['Latitude'] = options['Latitude'] - if options.get('Longitude',None) is not None and bool(options['Longitude']): - params_secrets['Longitude'] = options['Longitude'] - if options.get('Altitude',None) is not None and bool(options['Altitude']): - params_secrets['Altitude'] = options['Altitude'] - + if url_from_options != "empty" and url_from_options != "": + params_secrets["hass_url"] = url_from_options + if key_from_options != "empty" and key_from_options != "": + params_secrets["long_lived_token"] = key_from_options + if ( + options.get("time_zone", "empty") 
!= "empty" + and options["time_zone"] != "" + ): + params_secrets["time_zone"] = options["time_zone"] + if options.get("Latitude", None) is not None and bool( + options["Latitude"] + ): + params_secrets["Latitude"] = options["Latitude"] + if options.get("Longitude", None) is not None and bool( + options["Longitude"] + ): + params_secrets["Longitude"] = options["Longitude"] + if options.get("Altitude", None) is not None and bool( + options["Altitude"] + ): + params_secrets["Altitude"] = options["Altitude"] + # Obtain the forecast secrets (if any) from options.json (default /app/options.json) - forecast_secrets = ["solcast_api_key","solcast_rooftop_id","solar_forecast_kwp"] + forecast_secrets = [ + "solcast_api_key", + "solcast_rooftop_id", + "solar_forecast_kwp", + ] if any(x in forecast_secrets for x in list(options.keys())): logger.debug("Obtaining forecast secrets from options.json") - if options.get('solcast_api_key',"empty") != "empty" and options['solcast_api_key'] != '': - params_secrets['solcast_api_key'] = options['solcast_api_key'] - if options.get('solcast_rooftop_id',"empty") != "empty" and options['solcast_rooftop_id'] != '': - params_secrets['solcast_rooftop_id'] = options['solcast_rooftop_id'] - if options.get('solar_forecast_kwp',None) and bool(options['solar_forecast_kwp']): - params_secrets['solar_forecast_kwp'] = options['solar_forecast_kwp'] - + if ( + options.get("solcast_api_key", "empty") != "empty" + and options["solcast_api_key"] != "" + ): + params_secrets["solcast_api_key"] = options["solcast_api_key"] + if ( + options.get("solcast_rooftop_id", "empty") != "empty" + and options["solcast_rooftop_id"] != "" + ): + params_secrets["solcast_rooftop_id"] = options["solcast_rooftop_id"] + if options.get("solar_forecast_kwp", None) and bool( + options["solar_forecast_kwp"] + ): + params_secrets["solar_forecast_kwp"] = options["solar_forecast_kwp"] + # Obtain secrets from secrets_emhass.yaml? (default /app/secrets_emhass.yaml) if secrets_path and pathlib.Path(secrets_path).is_file(): logger.debug("Obtaining secrets from secrets file") - with open(pathlib.Path(secrets_path), 'r') as file: + with open(pathlib.Path(secrets_path), "r") as file: params_secrets.update(yaml.load(file, Loader=yaml.FullLoader)) - # Receive key and url from ARG/arguments? - if argument.get('url',None) is not None: - params_secrets['hass_url'] = argument['url'] - logger.debug("Obtaining url from passed argument") - if argument.get('key',None) is not None: - params_secrets['long_lived_token'] = argument['key'] - logger.debug("Obtaining long_lived_token from passed argument") - - return emhass_conf, params_secrets - - - -def build_params(emhass_conf: dict, params_secrets: dict, config: dict, - logger: logging.Logger) -> dict: + # Receive key and url from ARG/arguments? + if argument.get("url", None) is not None: + params_secrets["hass_url"] = argument["url"] + logger.debug("Obtaining url from passed argument") + if argument.get("key", None) is not None: + params_secrets["long_lived_token"] = argument["key"] + logger.debug("Obtaining long_lived_token from passed argument") + + return emhass_conf, params_secrets + + +def build_params( + emhass_conf: dict, params_secrets: dict, config: dict, logger: logging.Logger +) -> dict: """ Build the main params dictionary from the config and secrets Appends configuration catagories used by emhass to the parameters. 
(with use of the associations file as a reference) - + :param emhass_conf: Dictionary containing the needed emhass paths :type emhass_conf: dict :param params_secrets: The dictionary containing the built secret variables :type params_secrets: dict - :param config: The dictionary of built config parameters + :param config: The dictionary of built config parameters :type config: dict :param logger: The logger object :type logger: logging.Logger @@ -1097,104 +1285,203 @@ def build_params(emhass_conf: dict, params_secrets: dict, config: dict, """ if type(params_secrets) is not dict: params_secrets = {} - + params = {} - #Start with blank config catagories - params['retrieve_hass_conf'] = {} - params['params_secrets'] = {} - params['optim_conf'] = {} - params['plant_conf'] = {} - - # Obtain associations to categorize parameters to their corresponding config catagories - if emhass_conf.get('associations_path', get_root(__file__, num_parent=2) / 'data/associations.csv').exists(): - with emhass_conf['associations_path'].open('r') as data: + # Start with blank config catagories + params["retrieve_hass_conf"] = {} + params["params_secrets"] = {} + params["optim_conf"] = {} + params["plant_conf"] = {} + + # Obtain associations to categorize parameters to their corresponding config catagories + if emhass_conf.get( + "associations_path", get_root(__file__, num_parent=2) / "data/associations.csv" + ).exists(): + with emhass_conf["associations_path"].open("r") as data: associations = list(csv.reader(data, delimiter=",")) else: - logger.error("Unable to obtain the associations file (associations.csv) in: " + str(emhass_conf['associations_path'])) + logger.error( + "Unable to obtain the associations file (associations.csv) in: " + + str(emhass_conf["associations_path"]) + ) return False # Association file key reference # association[0] = config catagories # association[1] = legacy parameter name # association[2] = parameter (config.json/config_defaults.json) - # association[3] = parameter list name if exists (not used, from legacy options.json) - + # association[3] = parameter list name if exists (not used, from legacy options.json) # Use association list to append parameters from config into params (with corresponding config catagories) for association in associations: - # If parameter has list_ name and parameter in config is presented with its list name + # If parameter has list_ name and parameter in config is presented with its list name # (ie, config parameter is in legacy options.json format) - if len(association) == 4 and config.get(association[3],None) is not None: + if len(association) == 4 and config.get(association[3], None) is not None: # Extract lists of dictionaries if config[association[3]] and type(config[association[3]][0]) is dict: - params[association[0]][association[2]] = [i[association[2]] for i in config[association[3]]] + params[association[0]][association[2]] = [ + i[association[2]] for i in config[association[3]] + ] else: params[association[0]][association[2]] = config[association[3]] - # Else, directly set value of config parameter to param - elif config.get(association[2],None) is not None: + # Else, directly set value of config parameter to param + elif config.get(association[2], None) is not None: params[association[0]][association[2]] = config[association[2]] # Check if we need to create `list_hp_periods` from config (ie. 
legacy options.json format) - if params.get('optim_conf',None) is not None and config.get("list_peak_hours_periods_start_hours", None) is not None and config.get("list_peak_hours_periods_end_hours", None) is not None: - start_hours_list = [i["peak_hours_periods_start_hours"] for i in config["list_peak_hours_periods_start_hours"]] - end_hours_list = [i["peak_hours_periods_end_hours"] for i in config["list_peak_hours_periods_end_hours"]] - num_peak_hours = len(start_hours_list) - list_hp_periods_list = {'period_hp_'+str(i+1):[{'start':start_hours_list[i]},{'end':end_hours_list[i]}] for i in range(num_peak_hours)} - params['optim_conf']['load_peak_hour_periods'] = list_hp_periods_list + if ( + params.get("optim_conf", None) is not None + and config.get("list_peak_hours_periods_start_hours", None) is not None + and config.get("list_peak_hours_periods_end_hours", None) is not None + ): + start_hours_list = [ + i["peak_hours_periods_start_hours"] + for i in config["list_peak_hours_periods_start_hours"] + ] + end_hours_list = [ + i["peak_hours_periods_end_hours"] + for i in config["list_peak_hours_periods_end_hours"] + ] + num_peak_hours = len(start_hours_list) + list_hp_periods_list = { + "period_hp_" + str(i + 1): [ + {"start": start_hours_list[i]}, + {"end": end_hours_list[i]}, + ] + for i in range(num_peak_hours) + } + params["optim_conf"]["load_peak_hour_periods"] = list_hp_periods_list else: # Else, check param already contains load_peak_hour_periods from config - if params['optim_conf'].get('load_peak_hour_periods',None) is None: - logger.warning("Unable to detect or create load_peak_hour_periods parameter") + if params["optim_conf"].get("load_peak_hour_periods", None) is None: + logger.warning( + "Unable to detect or create load_peak_hour_periods parameter" + ) # Format load_peak_hour_periods list to dict if necessary - if params['optim_conf'].get('load_peak_hour_periods',None) is not None and isinstance(params['optim_conf']['load_peak_hour_periods'], list): - params['optim_conf']['load_peak_hour_periods'] = dict((key, d[key]) for d in params['optim_conf']['load_peak_hour_periods'] for key in d) + if params["optim_conf"].get( + "load_peak_hour_periods", None + ) is not None and isinstance(params["optim_conf"]["load_peak_hour_periods"], list): + params["optim_conf"]["load_peak_hour_periods"] = dict( + (key, d[key]) + for d in params["optim_conf"]["load_peak_hour_periods"] + for key in d + ) # Call function to check parameter lists that require the same length as deferrable loads # If not, set defaults it fill in gaps - if params['optim_conf'].get('number_of_deferrable_loads',None) is not None: - num_def_loads = params['optim_conf']['number_of_deferrable_loads'] - params['optim_conf']['start_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'start_timesteps_of_each_deferrable_load',logger) - params['optim_conf']['end_timesteps_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'end_timesteps_of_each_deferrable_load',logger) - params['optim_conf']['set_deferrable_load_single_constant'] = check_def_loads(num_def_loads,params['optim_conf'],False,'set_deferrable_load_single_constant',logger) - params['optim_conf']['treat_deferrable_load_as_semi_cont'] = check_def_loads(num_def_loads,params['optim_conf'],True,'treat_deferrable_load_as_semi_cont',logger) - params['optim_conf']['set_deferrable_startup_penalty'] = check_def_loads(num_def_loads,params['optim_conf'],0.0,'set_deferrable_startup_penalty',logger) - 
params['optim_conf']['operating_hours_of_each_deferrable_load'] = check_def_loads(num_def_loads,params['optim_conf'],0,'operating_hours_of_each_deferrable_load',logger) - params['optim_conf']['nominal_power_of_deferrable_loads'] = check_def_loads(num_def_loads,params['optim_conf'],0,'nominal_power_of_deferrable_loads',logger) + if params["optim_conf"].get("number_of_deferrable_loads", None) is not None: + num_def_loads = params["optim_conf"]["number_of_deferrable_loads"] + params["optim_conf"]["start_timesteps_of_each_deferrable_load"] = ( + check_def_loads( + num_def_loads, + params["optim_conf"], + 0, + "start_timesteps_of_each_deferrable_load", + logger, + ) + ) + params["optim_conf"]["end_timesteps_of_each_deferrable_load"] = check_def_loads( + num_def_loads, + params["optim_conf"], + 0, + "end_timesteps_of_each_deferrable_load", + logger, + ) + params["optim_conf"]["set_deferrable_load_single_constant"] = check_def_loads( + num_def_loads, + params["optim_conf"], + False, + "set_deferrable_load_single_constant", + logger, + ) + params["optim_conf"]["treat_deferrable_load_as_semi_cont"] = check_def_loads( + num_def_loads, + params["optim_conf"], + True, + "treat_deferrable_load_as_semi_cont", + logger, + ) + params["optim_conf"]["set_deferrable_startup_penalty"] = check_def_loads( + num_def_loads, + params["optim_conf"], + 0.0, + "set_deferrable_startup_penalty", + logger, + ) + params["optim_conf"]["operating_hours_of_each_deferrable_load"] = ( + check_def_loads( + num_def_loads, + params["optim_conf"], + 0, + "operating_hours_of_each_deferrable_load", + logger, + ) + ) + params["optim_conf"]["nominal_power_of_deferrable_loads"] = check_def_loads( + num_def_loads, + params["optim_conf"], + 0, + "nominal_power_of_deferrable_loads", + logger, + ) else: logger.warning("unable to obtain parameter: number_of_deferrable_loads") # historic_days_to_retrieve should be no less then 2 - if params["retrieve_hass_conf"].get('historic_days_to_retrieve',None) is not None: - if params["retrieve_hass_conf"]['historic_days_to_retrieve'] < 2: - params["retrieve_hass_conf"]['historic_days_to_retrieve'] = 2 - logger.warning("days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. Make sure your sensors also have at least 2 days of history") + if params["retrieve_hass_conf"].get("historic_days_to_retrieve", None) is not None: + if params["retrieve_hass_conf"]["historic_days_to_retrieve"] < 2: + params["retrieve_hass_conf"]["historic_days_to_retrieve"] = 2 + logger.warning( + "days_to_retrieve should not be lower then 2, setting days_to_retrieve to 2. 
Make sure your sensors also have at least 2 days of history" + ) else: logger.warning("unable to obtain parameter: historic_days_to_retrieve") # Configure secrets, set params to correct config categorie # retrieve_hass_conf - params['retrieve_hass_conf']["hass_url"] = params_secrets.get("hass_url",None) - params['retrieve_hass_conf']["long_lived_token"] = params_secrets.get("long_lived_token",None) - params['retrieve_hass_conf']["time_zone"] = params_secrets.get("time_zone",None) - params['retrieve_hass_conf']['Latitude'] = params_secrets.get('Latitude',None) - params['retrieve_hass_conf']['Longitude'] = params_secrets.get('Longitude',None) - params['retrieve_hass_conf']['Altitude'] = params_secrets.get('Altitude',None) + params["retrieve_hass_conf"]["hass_url"] = params_secrets.get("hass_url", None) + params["retrieve_hass_conf"]["long_lived_token"] = params_secrets.get( + "long_lived_token", None + ) + params["retrieve_hass_conf"]["time_zone"] = params_secrets.get("time_zone", None) + params["retrieve_hass_conf"]["Latitude"] = params_secrets.get("Latitude", None) + params["retrieve_hass_conf"]["Longitude"] = params_secrets.get("Longitude", None) + params["retrieve_hass_conf"]["Altitude"] = params_secrets.get("Altitude", None) # Update optional param secrets - if params["optim_conf"].get('weather_forecast_method',None) is not None: - if params["optim_conf"]['weather_forecast_method'] == "solcast": - params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456") - params["params_secrets"]["solcast_api_key"] = params_secrets.get("solcast_api_key", "123456") - params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456") - params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get("solcast_rooftop_id", "123456") - elif params["optim_conf"]['weather_forecast_method'] == "solar.forecast": - params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5) - params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get("solar_forecast_kwp", 5) + if params["optim_conf"].get("weather_forecast_method", None) is not None: + if params["optim_conf"]["weather_forecast_method"] == "solcast": + params["retrieve_hass_conf"]["solcast_api_key"] = params_secrets.get( + "solcast_api_key", "123456" + ) + params["params_secrets"]["solcast_api_key"] = params_secrets.get( + "solcast_api_key", "123456" + ) + params["retrieve_hass_conf"]["solcast_rooftop_id"] = params_secrets.get( + "solcast_rooftop_id", "123456" + ) + params["params_secrets"]["solcast_rooftop_id"] = params_secrets.get( + "solcast_rooftop_id", "123456" + ) + elif params["optim_conf"]["weather_forecast_method"] == "solar.forecast": + params["retrieve_hass_conf"]["solar_forecast_kwp"] = params_secrets.get( + "solar_forecast_kwp", 5 + ) + params["params_secrets"]["solar_forecast_kwp"] = params_secrets.get( + "solar_forecast_kwp", 5 + ) else: - logger.warning("Unable to detect weather_forecast_method parameter") + logger.warning("Unable to detect weather_forecast_method parameter") # Check if secrets parameters still defaults values - secret_params = ["https://myhass.duckdns.org/","thatverylongtokenhere",45.83,6.86,4807.8] - if any(x in secret_params for x in params['retrieve_hass_conf'].values()): - logger.warning("Some secret parameters values are still matching their defaults") - + secret_params = [ + "https://myhass.duckdns.org/", + "thatverylongtokenhere", + 45.83, + 6.86, + 4807.8, + ] + if any(x in secret_params for x in 
params["retrieve_hass_conf"].values()): + logger.warning( + "Some secret parameters values are still matching their defaults" + ) # Set empty dict objects for params passed_data # To be latter populated with runtime parameters (treat_runtimeparams) @@ -1206,16 +1493,19 @@ def build_params(emhass_conf: dict, params_secrets: dict, config: dict, "prediction_horizon": None, "soc_init": None, "soc_final": None, - 'operating_hours_of_each_deferrable_load': None, - 'start_timesteps_of_each_deferrable_load': None, - 'end_timesteps_of_each_deferrable_load': None, + "operating_hours_of_each_deferrable_load": None, + "start_timesteps_of_each_deferrable_load": None, + "end_timesteps_of_each_deferrable_load": None, "alpha": None, "beta": None, } return params -def check_def_loads(num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger): + +def check_def_loads( + num_def_loads: int, parameter: list[dict], default, parameter_name: str, logger +): """ Check parameter lists with deferrable loads number, if they do not match, enlarge to fit. @@ -1229,12 +1519,21 @@ def check_def_loads(num_def_loads: int, parameter: list[dict], default, paramete :type logger: str :param logger: The logger object :type logger: logging.Logger - return: parameter list + return: parameter list :rtype: list[dict] """ - if parameter.get(parameter_name,None) is not None and type(parameter[parameter_name]) is list and num_def_loads > len(parameter[parameter_name]): - logger.warning(parameter_name + " does not match number in num_def_loads, adding default values ("+ str(default) + ") to parameter") + if ( + parameter.get(parameter_name, None) is not None + and type(parameter[parameter_name]) is list + and num_def_loads > len(parameter[parameter_name]) + ): + logger.warning( + parameter_name + + " does not match number in num_def_loads, adding default values (" + + str(default) + + ") to parameter" + ) for x in range(len(parameter[parameter_name]), num_def_loads): parameter[parameter_name].append(default) return parameter[parameter_name] diff --git a/src/emhass/web_server.py b/src/emhass/web_server.py index f6752768..e16aa0fc 100644 --- a/src/emhass/web_server.py +++ b/src/emhass/web_server.py @@ -1,51 +1,78 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from flask import Flask, request, make_response, render_template +import argparse +import json +import logging +import os +import pickle +import re +import threading +from distutils.util import strtobool +from importlib.metadata import PackageNotFoundError, version +from pathlib import Path + +import yaml +from flask import Flask, make_response, request from jinja2 import Environment, PackageLoader -from requests import get from waitress import serve -from importlib.metadata import version, PackageNotFoundError -from pathlib import Path -import os, json, argparse, pickle, yaml, logging, re, threading -from distutils.util import strtobool -from emhass.command_line import set_input_data_dict -from emhass.command_line import perfect_forecast_optim, dayahead_forecast_optim, naive_mpc_optim -from emhass.command_line import forecast_model_fit, forecast_model_predict, forecast_model_tune, weather_forecast_cache -from emhass.command_line import regressor_model_fit, regressor_model_predict -from emhass.command_line import publish_data, continual_publish -from emhass.utils import get_injection_dict, get_injection_dict_forecast_model_fit, \ - get_injection_dict_forecast_model_tune, build_config, build_secrets, build_params, \ - param_to_config, 
build_legacy_config_params +from emhass.command_line import ( + continual_publish, + dayahead_forecast_optim, + forecast_model_fit, + forecast_model_predict, + forecast_model_tune, + naive_mpc_optim, + perfect_forecast_optim, + publish_data, + regressor_model_fit, + regressor_model_predict, + set_input_data_dict, + weather_forecast_cache, +) +from emhass.utils import ( + build_config, + build_legacy_config_params, + build_params, + build_secrets, + get_injection_dict, + get_injection_dict_forecast_model_fit, + get_injection_dict_forecast_model_tune, + param_to_config, +) # Define the Flask instance app = Flask(__name__) emhass_conf = {} + def checkFileLog(refString=None) -> bool: """ Check logfile for error, anything after string match if provided. - :param refString: String to reduce log area to check for errors. Use to reduce log to check anything after string match (ie. an action). + :param refString: String to reduce log area to check for errors. Use to reduce log to check anything after string match (ie. an action). :type refString: str :return: Boolean return if error was found in logs :rtype: bool """ - if (refString is not None): - logArray = grabLog(refString) #grab reduced log array (everything after string match) - else: - if ((emhass_conf['data_path'] / 'actionLogs.txt')).exists(): - with open(str(emhass_conf['data_path'] / 'actionLogs.txt'), "r") as fp: - logArray = fp.readlines() + if refString is not None: + logArray = grabLog( + refString + ) # grab reduced log array (everything after string match) + else: + if (emhass_conf["data_path"] / "actionLogs.txt").exists(): + with open(str(emhass_conf["data_path"] / "actionLogs.txt"), "r") as fp: + logArray = fp.readlines() else: app.logger.debug("Unable to obtain actionLogs.txt") for logString in logArray: - if (logString.split(' ', 1)[0] == "ERROR"): - return True + if logString.split(" ", 1)[0] == "ERROR": + return True return False + def grabLog(refString) -> list: """ Find string in logs, append all lines after into list to return. 
@@ -58,31 +85,33 @@ def grabLog(refString) -> list: """ isFound = [] output = [] - if ((emhass_conf['data_path'] / 'actionLogs.txt')).exists(): - with open(str(emhass_conf['data_path'] / 'actionLogs.txt'), "r") as fp: - logArray = fp.readlines() - # Find all string matches, log key (line Number) in isFound - for x in range(len(logArray)-1): - if (re.search(refString,logArray[x])): - isFound.append(x) - if len(isFound) != 0: - # Use last item in isFound to extract action logs - for x in range(isFound[-1],len(logArray)): - output.append(logArray[x]) + if (emhass_conf["data_path"] / "actionLogs.txt").exists(): + with open(str(emhass_conf["data_path"] / "actionLogs.txt"), "r") as fp: + logArray = fp.readlines() + # Find all string matches, log key (line Number) in isFound + for x in range(len(logArray) - 1): + if re.search(refString, logArray[x]): + isFound.append(x) + if len(isFound) != 0: + # Use last item in isFound to extract action logs + for x in range(isFound[-1], len(logArray)): + output.append(logArray[x]) return output + # Clear the log file def clearFileLog(): """ Clear the contents of the log file (actionLogs.txt) """ - if ((emhass_conf['data_path'] / 'actionLogs.txt')).exists(): - with open(str(emhass_conf['data_path'] / 'actionLogs.txt'), "w") as fp: - fp.truncate() + if (emhass_conf["data_path"] / "actionLogs.txt").exists(): + with open(str(emhass_conf["data_path"] / "actionLogs.txt"), "w") as fp: + fp.truncate() -@app.route('/') -@app.route('/index') + +@app.route("/") +@app.route("/index") def index(): """ Render initial index page and serve to web server. @@ -91,29 +120,31 @@ def index(): """ app.logger.info("EMHASS server online, serving index.html...") # Load HTML template - file_loader = PackageLoader('emhass', 'templates') + file_loader = PackageLoader("emhass", "templates") env = Environment(loader=file_loader) - #check if index.html exists - if 'index.html' not in env.list_templates(): + # check if index.html exists + if "index.html" not in env.list_templates(): app.logger.error("Unable to find index.html in emhass module") - return make_response(["ERROR: unable to find index.html in emhass module"],404) - template = env.get_template('index.html') + return make_response(["ERROR: unable to find index.html in emhass module"], 404) + template = env.get_template("index.html") # Load cached dict (if exists), to present generated plot tables - if (emhass_conf['data_path'] / 'injection_dict.pkl').exists(): - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "rb") as fid: + if (emhass_conf["data_path"] / "injection_dict.pkl").exists(): + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "rb") as fid: injection_dict = pickle.load(fid) else: - app.logger.info("The data container dictionary is empty... Please launch an optimization task") - injection_dict={} + app.logger.info( + "The data container dictionary is empty... 
Please launch an optimization task" + ) + injection_dict = {} - # replace {{basename}} in html template html with path root + # replace {{basename}} in html template html with path root # basename = request.headers.get("X-Ingress-Path", "") # return make_response(template.render(injection_dict=injection_dict, basename=basename)) - + return make_response(template.render(injection_dict=injection_dict)) -@app.route('/configuration') +@app.route("/configuration") def configuration(): """ Configuration page actions: @@ -122,41 +153,46 @@ def configuration(): """ app.logger.info("serving configuration.html...") # Load HTML template - file_loader = PackageLoader('emhass', 'templates') + file_loader = PackageLoader("emhass", "templates") env = Environment(loader=file_loader) - #check if configuration.html exists - if 'configuration.html' not in env.list_templates(): + # check if configuration.html exists + if "configuration.html" not in env.list_templates(): app.logger.error("Unable to find configuration.html in emhass module") - return make_response(["ERROR: unable to find configuration.html in emhass module"],404) - template = env.get_template('configuration.html') + return make_response( + ["ERROR: unable to find configuration.html in emhass module"], 404 + ) + template = env.get_template("configuration.html") return make_response(template.render(config=params)) -@app.route('/template', methods=['GET']) +@app.route("/template", methods=["GET"]) def template_action(): """ - template page actions: + template page actions: Render and serve template html """ app.logger.info(" >> Sending rendered template table data") - file_loader = PackageLoader('emhass', 'templates') + file_loader = PackageLoader("emhass", "templates") env = Environment(loader=file_loader) # Check if template.html exists - if 'template.html' not in env.list_templates(): + if "template.html" not in env.list_templates(): app.logger.error("Unable to find template.html in emhass module") - return make_response(["WARNING: unable to find template.html in emhass module"],404) - template = env.get_template('template.html') - if (emhass_conf['data_path'] / 'injection_dict.pkl').exists(): - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "rb") as fid: + return make_response( + ["WARNING: unable to find template.html in emhass module"], 404 + ) + template = env.get_template("template.html") + if (emhass_conf["data_path"] / "injection_dict.pkl").exists(): + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "rb") as fid: injection_dict = pickle.load(fid) else: app.logger.warning("Unable to obtain plot data from injection_dict.pkl") app.logger.warning("Try running an launch an optimization task") - injection_dict={} + injection_dict = {} return make_response(template.render(injection_dict=injection_dict)) -@app.route('/get-config', methods=['GET']) + +@app.route("/get-config", methods=["GET"]) def parameter_get(): """ Get request action that builds, formats and sends config as json (config.json format) @@ -164,21 +200,27 @@ def parameter_get(): """ app.logger.debug("Obtaining current saved parameters as config") # Build config from all possible sources (inc. 
legacy yaml config) - config = build_config(emhass_conf,app.logger,emhass_conf["defaults_path"],emhass_conf["config_path"],emhass_conf["legacy_config_path"]) + config = build_config( + emhass_conf, + app.logger, + emhass_conf["defaults_path"], + emhass_conf["config_path"], + emhass_conf["legacy_config_path"], + ) if type(config) is bool and not config: - return make_response(["failed to retrieve default config file"],500) + return make_response(["failed to retrieve default config file"], 500) # Format parameters in config with params (converting legacy json parameters from options.json if any) - params = build_params(emhass_conf,{},config,app.logger) + params = build_params(emhass_conf, {}, config, app.logger) if type(params) is bool and not params: - return make_response(["Unable to obtain associations file"],500) + return make_response(["Unable to obtain associations file"], 500) # Covert formatted parameters from params back into config.json format - return_config = param_to_config(params,app.logger) + return_config = param_to_config(params, app.logger) # Send config - return make_response(return_config,201) + return make_response(return_config, 201) # Get default Config -@app.route('/get-config/defaults', methods=['GET']) +@app.route("/get-config/defaults", methods=["GET"]) def config_get(): """ Get request action, retrieves and sends default configuration @@ -186,21 +228,21 @@ def config_get(): """ app.logger.debug("Obtaining default parameters") # Build config, passing only default file - config = build_config(emhass_conf,app.logger,emhass_conf["defaults_path"]) + config = build_config(emhass_conf, app.logger, emhass_conf["defaults_path"]) if type(config) is bool and not config: - return make_response(["failed to retrieve default config file"],500) + return make_response(["failed to retrieve default config file"], 500) # Format parameters in config with params - params = build_params(emhass_conf,{},config,app.logger) + params = build_params(emhass_conf, {}, config, app.logger) if type(params) is bool and not params: - return make_response(["Unable to obtain associations file"],500) + return make_response(["Unable to obtain associations file"], 500) # Covert formatted parameters from params back into config.json format - return_config = param_to_config(params,app.logger) + return_config = param_to_config(params, app.logger) # Send params - return make_response(return_config,201) + return make_response(return_config, 201) # Get YAML-to-JSON config -@app.route('/get-json', methods=['POST']) +@app.route("/get-json", methods=["POST"]) def json_convert(): """ Post request action, receives yaml config (config_emhass.yaml or EMHASS-Add-on config page) and converts to config json format. @@ -212,113 +254,130 @@ def json_convert(): # If filed to Parse YAML if yaml_config is None: - return make_response(["failed to Parse YAML from data"],400) + return make_response(["failed to Parse YAML from data"], 400) # Test YAML is legacy config format (from config_emhass.yaml) - test_legacy_config = build_legacy_config_params(emhass_conf,yaml_config, app.logger) + test_legacy_config = build_legacy_config_params( + emhass_conf, yaml_config, app.logger + ) if test_legacy_config: yaml_config = test_legacy_config # Format YAML to params (format params. 
check if params match legacy option.json format) - params = build_params(emhass_conf,{},yaml_config,app.logger) + params = build_params(emhass_conf, {}, yaml_config, app.logger) if type(params) is bool and not params: - return make_response(["Unable to obtain associations file"],500) + return make_response(["Unable to obtain associations file"], 500) # Covert formatted parameters from params back into config.json format - config = param_to_config(params,app.logger) + config = param_to_config(params, app.logger) # convert json to str config = json.dumps(config) # Send params - return make_response(config,201) + return make_response(config, 201) + -@app.route('/set-config', methods=['POST']) +@app.route("/set-config", methods=["POST"]) def parameter_set(): """ Receive JSON config, and save config to file (config.json and param.pkl) """ config = {} - if not emhass_conf['defaults_path']: - return make_response(["Unable to Obtain defaults_path from emhass_conf"],500) - if not emhass_conf['config_path']: - return make_response(["Unable to Obtain config_path from emhass_conf"],500) - + if not emhass_conf["defaults_path"]: + return make_response(["Unable to Obtain defaults_path from emhass_conf"], 500) + if not emhass_conf["config_path"]: + return make_response(["Unable to Obtain config_path from emhass_conf"], 500) + # Load defaults as a reference point (for sorting) and a base to override - if os.path.exists(emhass_conf['defaults_path']) and Path(emhass_conf['defaults_path']).is_file(): - with emhass_conf['defaults_path'].open('r') as data: + if ( + os.path.exists(emhass_conf["defaults_path"]) + and Path(emhass_conf["defaults_path"]).is_file() + ): + with emhass_conf["defaults_path"].open("r") as data: config = json.load(data) else: - app.logger.warning("Unable to obtain default config. only parameters passed from request will be saved to config.json") + app.logger.warning( + "Unable to obtain default config. only parameters passed from request will be saved to config.json" + ) # Retrieve sent config json request_data = request.get_json(force=True) # check if data is empty if len(request_data) == 0: - return make_response(["failed to retrieve config json"],400) - + return make_response(["failed to retrieve config json"], 400) + # Format config by converting to params (format params. check if params match legacy option.json format. If so format) - params = build_params(emhass_conf,params_secrets,request_data,app.logger) + params = build_params(emhass_conf, params_secrets, request_data, app.logger) if type(params) is bool and not params: - return make_response(["Unable to obtain associations file"],500) - + return make_response(["Unable to obtain associations file"], 500) + # Covert formatted parameters from params back into config.json format. 
# Overwrite existing default parameters in config - config.update(param_to_config(params,app.logger)) + config.update(param_to_config(params, app.logger)) # Save config to config.json - if os.path.exists(emhass_conf['config_path'].parent): - with emhass_conf['config_path'].open('w') as f: + if os.path.exists(emhass_conf["config_path"].parent): + with emhass_conf["config_path"].open("w") as f: json.dump(config, f, indent=4) - else: - return make_response(["Unable to save config file"],500) + else: + return make_response(["Unable to save config file"], 500) request_data - # Save params with updated config - if os.path.exists(emhass_conf['data_path']): - with open(str(emhass_conf['data_path'] / 'params.pkl'), "wb") as fid: - pickle.dump((config_path, build_params(emhass_conf,params_secrets,config,app.logger)), fid) - else: - return make_response(["Unable to save params file, missing data_path"],500) - + # Save params with updated config + if os.path.exists(emhass_conf["data_path"]): + with open(str(emhass_conf["data_path"] / "params.pkl"), "wb") as fid: + pickle.dump( + ( + config_path, + build_params(emhass_conf, params_secrets, config, app.logger), + ), + fid, + ) + else: + return make_response(["Unable to save params file, missing data_path"], 500) + app.logger.info("Saved parameters from webserver") - return make_response({},201) + return make_response({}, 201) -@app.route('/action/', methods=['POST']) + +@app.route("/action/", methods=["POST"]) def action_call(action_name): """ Receive Post action, run action according to passed slug(action_name) (e.g. /action/publish-data) :param action_name: Slug/Action string corresponding to which action to take :type action_name: String - + """ # Setting up parameters # Params - if (emhass_conf['data_path'] / 'params.pkl').exists(): - with open(str(emhass_conf['data_path'] / 'params.pkl'), "rb") as fid: - emhass_conf['config_path'], params = pickle.load(fid) + ActionStr = " >> Obtaining params: " + app.logger.info(ActionStr) + if (emhass_conf["data_path"] / "params.pkl").exists(): + with open(str(emhass_conf["data_path"] / "params.pkl"), "rb") as fid: + emhass_conf["config_path"], params = pickle.load(fid) # Set local costfun variable - if params.get("optim_conf",None) is not None: - costfun = params["optim_conf"].get("costfun","profit") + if params.get("optim_conf", None) is not None: + costfun = params["optim_conf"].get("costfun", "profit") params = json.dumps(params) else: app.logger.error("Unable to find params.pkl file") return make_response(grabLog(ActionStr), 400) # Runtime - runtimeparams = request.get_json(force=True,silent=True) + runtimeparams = request.get_json(force=True, silent=True) if runtimeparams is not None: - if runtimeparams != '{}': + if runtimeparams != "{}": app.logger.info("Passed runtime parameters: " + str(runtimeparams)) else: app.logger.warning("Unable to parse runtime parameters") - runtimeparams = {} + runtimeparams = {} runtimeparams = json.dumps(runtimeparams) # weather-forecast-cache (check before set_input_data_dict) - if action_name == 'weather-forecast-cache': + if action_name == "weather-forecast-cache": ActionStr = " >> Performing weather forecast, try to caching result" app.logger.info(ActionStr) weather_forecast_cache(emhass_conf, params, runtimeparams, app.logger) - msg = f'EMHASS >> Weather Forecast has run and results possibly cached... \n' + msg = "EMHASS >> Weather Forecast has run and results possibly cached... 
\n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) @@ -326,158 +385,187 @@ def action_call(action_name): ActionStr = " >> Setting input data dict" app.logger.info(ActionStr) app.logger.warning(costfun) - input_data_dict = set_input_data_dict(emhass_conf, costfun, - params, runtimeparams, action_name, app.logger) + input_data_dict = set_input_data_dict( + emhass_conf, costfun, params, runtimeparams, action_name, app.logger + ) if not input_data_dict: return make_response(grabLog(ActionStr), 400) - + # If continual_publish is True, start thread with loop function - if len(continual_publish_thread) == 0 and input_data_dict['retrieve_hass_conf'].get('continual_publish',False): + if len(continual_publish_thread) == 0 and input_data_dict["retrieve_hass_conf"].get( + "continual_publish", False + ): # Start Thread - continualLoop = threading.Thread(name='continual_publish',target=continual_publish,args=[input_data_dict,entity_path,app.logger]) + continualLoop = threading.Thread( + name="continual_publish", + target=continual_publish, + args=[input_data_dict, entity_path, app.logger], + ) continualLoop.start() - continual_publish_thread.append(continualLoop) + continual_publish_thread.append(continualLoop) # Run action based on POST request # If error in log when running action, return actions log (list) as response. (Using ActionStr as a reference of the action start in the log) # publish-data - if action_name == 'publish-data': + if action_name == "publish-data": ActionStr = " >> Publishing data..." app.logger.info(ActionStr) _ = publish_data(input_data_dict, app.logger) - msg = f'EMHASS >> Action publish-data executed... \n' + msg = "EMHASS >> Action publish-data executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # perfect-optim - elif action_name == 'perfect-optim': + elif action_name == "perfect-optim": ActionStr = " >> Performing perfect optimization..." app.logger.info(ActionStr) opt_res = perfect_forecast_optim(input_data_dict, app.logger) injection_dict = get_injection_dict(opt_res) - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "wb") as fid: + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "wb") as fid: pickle.dump(injection_dict, fid) - msg = f'EMHASS >> Action perfect-optim executed... \n' + msg = "EMHASS >> Action perfect-optim executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # dayahead-optim - elif action_name == 'dayahead-optim': + elif action_name == "dayahead-optim": ActionStr = " >> Performing dayahead optimization..." app.logger.info(ActionStr) opt_res = dayahead_forecast_optim(input_data_dict, app.logger) injection_dict = get_injection_dict(opt_res) - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "wb") as fid: + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "wb") as fid: pickle.dump(injection_dict, fid) - msg = f'EMHASS >> Action dayahead-optim executed... \n' + msg = "EMHASS >> Action dayahead-optim executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # naive-mpc-optim - elif action_name == 'naive-mpc-optim': + elif action_name == "naive-mpc-optim": ActionStr = " >> Performing naive MPC optimization..." 
app.logger.info(ActionStr) opt_res = naive_mpc_optim(input_data_dict, app.logger) injection_dict = get_injection_dict(opt_res) - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "wb") as fid: + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "wb") as fid: pickle.dump(injection_dict, fid) - msg = f'EMHASS >> Action naive-mpc-optim executed... \n' + msg = "EMHASS >> Action naive-mpc-optim executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # forecast-model-fit - elif action_name == 'forecast-model-fit': + elif action_name == "forecast-model-fit": ActionStr = " >> Performing a machine learning forecast model fit..." app.logger.info(ActionStr) df_fit_pred, _, mlf = forecast_model_fit(input_data_dict, app.logger) - injection_dict = get_injection_dict_forecast_model_fit( - df_fit_pred, mlf) - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "wb") as fid: + injection_dict = get_injection_dict_forecast_model_fit(df_fit_pred, mlf) + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "wb") as fid: pickle.dump(injection_dict, fid) - msg = f'EMHASS >> Action forecast-model-fit executed... \n' + msg = "EMHASS >> Action forecast-model-fit executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # forecast-model-predict - elif action_name == 'forecast-model-predict': + elif action_name == "forecast-model-predict": ActionStr = " >> Performing a machine learning forecast model predict..." app.logger.info(ActionStr) df_pred = forecast_model_predict(input_data_dict, app.logger) if df_pred is None: return make_response(grabLog(ActionStr), 400) - table1 = df_pred.reset_index().to_html(classes='mystyle', index=False) + table1 = df_pred.reset_index().to_html(classes="mystyle", index=False) injection_dict = {} - injection_dict['title'] = '

Custom machine learning forecast model predict' - injection_dict['subsubtitle0'] = 'Performed a prediction using a pre-trained model' - injection_dict['table1'] = table1 - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "wb") as fid: + injection_dict["title"] = ( + "Custom machine learning forecast model predict" + ) + injection_dict["subsubtitle0"] = ( + "Performed a prediction using a pre-trained model
" + ) + injection_dict["table1"] = table1 + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "wb") as fid: pickle.dump(injection_dict, fid) - msg = f'EMHASS >> Action forecast-model-predict executed... \n' + msg = "EMHASS >> Action forecast-model-predict executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # forecast-model-tune - elif action_name == 'forecast-model-tune': + elif action_name == "forecast-model-tune": ActionStr = " >> Performing a machine learning forecast model tune..." app.logger.info(ActionStr) - df_pred_optim, mlf = forecast_model_tune(input_data_dict, app.logger) - if df_pred_optim is None or mlf is None: + df_pred_optim, mlf = forecast_model_tune(input_data_dict, app.logger) + if df_pred_optim is None or mlf is None: return make_response(grabLog(ActionStr), 400) - injection_dict = get_injection_dict_forecast_model_tune( - df_pred_optim, mlf) - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "wb") as fid: + injection_dict = get_injection_dict_forecast_model_tune(df_pred_optim, mlf) + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "wb") as fid: pickle.dump(injection_dict, fid) - msg = f'EMHASS >> Action forecast-model-tune executed... \n' + msg = "EMHASS >> Action forecast-model-tune executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # regressor-model-fit - elif action_name == 'regressor-model-fit': + elif action_name == "regressor-model-fit": ActionStr = " >> Performing a machine learning regressor fit..." app.logger.info(ActionStr) regressor_model_fit(input_data_dict, app.logger) - msg = f'EMHASS >> Action regressor-model-fit executed... \n' + msg = "EMHASS >> Action regressor-model-fit executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) - # regressor-model-predict - elif action_name == 'regressor-model-predict': + # regressor-model-predict + elif action_name == "regressor-model-predict": ActionStr = " >> Performing a machine learning regressor predict..." app.logger.info(ActionStr) regressor_model_predict(input_data_dict, app.logger) - msg = f'EMHASS >> Action regressor-model-predict executed... \n' + msg = "EMHASS >> Action regressor-model-predict executed... \n" if not checkFileLog(ActionStr): return make_response(msg, 201) return make_response(grabLog(ActionStr), 400) # Else return error else: app.logger.error("ERROR: passed action is not valid") - msg = f'EMHASS >> ERROR: Passed action is not valid... \n' + msg = "EMHASS >> ERROR: Passed action is not valid... \n" return make_response(msg, 400) + if __name__ == "__main__": # Parsing arguments parser = argparse.ArgumentParser() - parser.add_argument('--url', type=str, help='The URL to your Home Assistant instance, ex the external_url in your hass configuration') - parser.add_argument('--key', type=str, help='Your access key. If using EMHASS in standalone this should be a Long-Lived Access Token') - parser.add_argument('--no_response', type=strtobool, default='False', help='This is set if json response errors occur') + parser.add_argument( + "--url", + type=str, + help="The URL to your Home Assistant instance, ex the external_url in your hass configuration", + ) + parser.add_argument( + "--key", + type=str, + help="Your access key. 
If using EMHASS in standalone this should be a Long-Lived Access Token", + ) + parser.add_argument( + "--no_response", + type=strtobool, + default="False", + help="This is set if json response errors occur", + ) args = parser.parse_args() # Pre formatted config parameters - config = {} + config = {} # Secrets params_secrets = {} # Built parameters (formatted config + secrets) - params = None - - # Find env's, not not set defaults + params = None + + # Find env's, not not set defaults DATA_PATH = os.getenv("DATA_PATH", default="/app/data/") ROOT_PATH = os.getenv("ROOT_PATH", default=str(Path(__file__).parent)) - CONFIG_PATH = os.getenv('CONFIG_PATH', default="/share/config.json") - OPTIONS_PATH = os.getenv('OPTIONS_PATH', default="/data/options.json") - DEFAULTS_PATH = os.getenv('DEFAULTS_PATH', default=ROOT_PATH +"/data/config_defaults.json") - ASSOCIATIONS_PATH = os.getenv('ASSOCIATIONS_PATH', default=ROOT_PATH + "/data/associations.csv") - LEGACY_CONFIG_PATH = os.getenv("LEGACY_CONFIG_PATH", default="/app/config_emhass.yaml") + CONFIG_PATH = os.getenv("CONFIG_PATH", default="/share/config.json") + OPTIONS_PATH = os.getenv("OPTIONS_PATH", default="/data/options.json") + DEFAULTS_PATH = os.getenv( + "DEFAULTS_PATH", default=ROOT_PATH + "/data/config_defaults.json" + ) + ASSOCIATIONS_PATH = os.getenv( + "ASSOCIATIONS_PATH", default=ROOT_PATH + "/data/associations.csv" + ) + LEGACY_CONFIG_PATH = os.getenv( + "LEGACY_CONFIG_PATH", default="/app/config_emhass.yaml" + ) # Define the paths config_path = Path(CONFIG_PATH) @@ -488,51 +576,62 @@ def action_call(action_name): data_path = Path(DATA_PATH) root_path = Path(ROOT_PATH) # Add paths to emhass_conf - emhass_conf['config_path'] = config_path - emhass_conf['options_path'] = options_path - emhass_conf['defaults_path'] = defaults_path - emhass_conf['associations_path'] = associations_path - emhass_conf['legacy_config_path'] = legacy_config_path - emhass_conf['data_path'] = data_path - emhass_conf['root_path'] = root_path + emhass_conf["config_path"] = config_path + emhass_conf["options_path"] = options_path + emhass_conf["defaults_path"] = defaults_path + emhass_conf["associations_path"] = associations_path + emhass_conf["legacy_config_path"] = legacy_config_path + emhass_conf["data_path"] = data_path + emhass_conf["root_path"] = root_path # Combine parameters from configuration sources (if exists) - config.update(build_config(emhass_conf,app.logger,defaults_path,config_path,legacy_config_path)) + config.update( + build_config( + emhass_conf, app.logger, defaults_path, config_path, legacy_config_path + ) + ) if type(config) is bool and not config: raise Exception("Failed to find default config") # Set local variables - costfun = os.getenv('LOCAL_COSTFUN', config.get('costfun', 'profit')) - logging_level = os.getenv('LOGGING_LEVEL', config.get('logging_level','INFO')) + costfun = os.getenv("LOCAL_COSTFUN", config.get("costfun", "profit")) + logging_level = os.getenv("LOGGING_LEVEL", config.get("logging_level", "INFO")) # Temporary set logging level if debug if logging_level == "DEBUG": app.logger.setLevel(logging.DEBUG) - + ## Secrets argument = {} if args.url: - argument['url'] = args.url + argument["url"] = args.url if args.key: - argument['key'] = args.key + argument["key"] = args.key # Combine secrets from ENV, Arguments/ARG, Secrets file (secrets_emhass.yaml), options (options.json from addon configuration file) and/or Home Assistant Standalone API (if exist) - emhass_conf, secrets = 
build_secrets(emhass_conf,app.logger,argument,options_path,os.getenv('SECRETS_PATH', default='/app/secrets_emhass.yaml'), bool(args.no_response)) + emhass_conf, secrets = build_secrets( + emhass_conf, + app.logger, + argument, + options_path, + os.getenv("SECRETS_PATH", default="/app/secrets_emhass.yaml"), + bool(args.no_response), + ) params_secrets.update(secrets) - server_ip = params_secrets.get("server_ip","0.0.0.0") + server_ip = params_secrets.get("server_ip", "0.0.0.0") - # Check if data path exists - if not os.path.isdir(emhass_conf['data_path']): - app.logger.warning("Unable to find data_path: " + str(emhass_conf['data_path'])) + # Check if data path exists + if not os.path.isdir(emhass_conf["data_path"]): + app.logger.warning("Unable to find data_path: " + str(emhass_conf["data_path"])) if os.path.isdir(Path("/app/data/")): - emhass_conf['data_path'] = Path("/app/data/") + emhass_conf["data_path"] = Path("/app/data/") else: - Path(root_path / "data/").mkdir(parents=True, exist_ok=True) - emhass_conf['data_path'] = root_path / "data/" - app.logger.info("data_path has been set to " + str(emhass_conf['data_path'])) + Path(root_path / "data/").mkdir(parents=True, exist_ok=True) + emhass_conf["data_path"] = root_path / "data/" + app.logger.info("data_path has been set to " + str(emhass_conf["data_path"])) # Initialize this global dict - if (emhass_conf['data_path'] / 'injection_dict.pkl').exists(): - with open(str(emhass_conf['data_path'] / 'injection_dict.pkl'), "rb") as fid: + if (emhass_conf["data_path"] / "injection_dict.pkl").exists(): + with open(str(emhass_conf["data_path"] / "injection_dict.pkl"), "rb") as fid: injection_dict = pickle.load(fid) else: injection_dict = None @@ -540,26 +639,28 @@ def action_call(action_name): # Build params from config and param_secrets (migrate params to correct config catagories), save result to params.pkl params = build_params(emhass_conf, params_secrets, config, app.logger) if type(params) is bool: - raise Exception("A error has occurred while building params") + raise Exception("A error has occurred while building params") # Update params with local variables params["optim_conf"]["costfun"] = costfun params["optim_conf"]["logging_level"] = logging_level - + # Save params to file for later reference - if os.path.exists(str(emhass_conf['data_path'])): - with open(str(emhass_conf['data_path'] / 'params.pkl'), "wb") as fid: + if os.path.exists(str(emhass_conf["data_path"])): + with open(str(emhass_conf["data_path"] / "params.pkl"), "wb") as fid: pickle.dump((config_path, params), fid) - else: - raise Exception("missing: " + str(emhass_conf['data_path'])) + else: + raise Exception("missing: " + str(emhass_conf["data_path"])) # Define loggers - ch = logging.StreamHandler() - formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + ch = logging.StreamHandler() + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) ch.setFormatter(formatter) # Action file logger - fileLogger = logging.FileHandler(str(emhass_conf['data_path'] / 'actionLogs.txt')) - formatter = logging.Formatter('%(levelname)s - %(name)s - %(message)s') - fileLogger.setFormatter(formatter) # add format to Handler + fileLogger = logging.FileHandler(str(emhass_conf["data_path"] / "actionLogs.txt")) + formatter = logging.Formatter("%(levelname)s - %(name)s - %(message)s") + fileLogger.setFormatter(formatter) # add format to Handler if logging_level == "DEBUG": app.logger.setLevel(logging.DEBUG) 
ch.setLevel(logging.DEBUG) @@ -582,29 +683,34 @@ def action_call(action_name): fileLogger.setLevel(logging.DEBUG) app.logger.propagate = False app.logger.addHandler(ch) - app.logger.addHandler(fileLogger) + app.logger.addHandler(fileLogger) # Clear Action File logger file, ready for new instance clearFileLog() - # If entity_path exists, remove any entity/metadata files - entity_path = emhass_conf['data_path'] / "entities" - if os.path.exists(entity_path): + # If entity_path exists, remove any entity/metadata files + entity_path = emhass_conf["data_path"] / "entities" + if os.path.exists(entity_path): entity_pathContents = os.listdir(entity_path) if len(entity_pathContents) > 0: for entity in entity_pathContents: os.remove(entity_path / entity) - + # Initialise continual publish thread list continual_publish_thread = [] - + # Launch server - port = int(os.environ.get('PORT', 5000)) - app.logger.info("Launching the emhass webserver at: http://"+server_ip+":"+str(port)) - app.logger.info("Home Assistant data fetch will be performed using url: "+params_secrets['hass_url']) - app.logger.info("The data path is: "+str(emhass_conf['data_path'])) - app.logger.info("The logging is: "+str(logging_level)) + port = int(os.environ.get("PORT", 5000)) + app.logger.info( + "Launching the emhass webserver at: http://" + server_ip + ":" + str(port) + ) + app.logger.info( + "Home Assistant data fetch will be performed using url: " + + params_secrets["hass_url"] + ) + app.logger.info("The data path is: " + str(emhass_conf["data_path"])) + app.logger.info("The logging is: " + str(logging_level)) try: - app.logger.info("Using core emhass version: "+version('emhass')) + app.logger.info("Using core emhass version: " + version("emhass")) except PackageNotFoundError: app.logger.info("Using development emhass version") - serve(app, host=server_ip, port=port, threads=8) \ No newline at end of file + serve(app, host=server_ip, port=port, threads=8) diff --git a/tests/test_command_line_utils.py b/tests/test_command_line_utils.py index d8b10a87..e083c52f 100644 --- a/tests/test_command_line_utils.py +++ b/tests/test_command_line_utils.py @@ -1,329 +1,551 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +import copy +import json +import pathlib import unittest from unittest.mock import patch -import pandas as pd -import pathlib, json, copy + import numpy as np +import pandas as pd -from emhass.command_line import set_input_data_dict +from emhass import utils from emhass.command_line import ( - perfect_forecast_optim, dayahead_forecast_optim, - naive_mpc_optim, -) -from emhass.command_line import ( forecast_model_fit, forecast_model_predict, forecast_model_tune, + main, + naive_mpc_optim, + perfect_forecast_optim, + publish_data, regressor_model_fit, regressor_model_predict, + set_input_data_dict, ) -from emhass.command_line import publish_data -from emhass.command_line import main -from emhass import utils # The root folder root = pathlib.Path(utils.get_root(__file__, num_parent=2)) # Build emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["defaults_path"] = 
emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create loggerW logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False) + class TestCommandLineUtils(unittest.TestCase): - @staticmethod def get_test_params(): # Build params with default config and secrets - if emhass_conf['defaults_path'].exists(): - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) - params = utils.build_params(emhass_conf,secrets,config,logger) + if emhass_conf["defaults_path"].exists(): + config = utils.build_config( + emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) + params = utils.build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) return params def setUp(self): params = TestCommandLineUtils.get_test_params() # Add runtime parameters for forecast lists runtimeparams = { - 'pv_power_forecast':[i+1 for i in range(48)], - 'load_power_forecast':[i+1 for i in range(48)], - 'load_cost_forecast':[i+1 for i in range(48)], - 'prod_price_forecast':[i+1 for i in range(48)] + "pv_power_forecast": [i + 1 for i in range(48)], + "load_power_forecast": [i + 1 for i in range(48)], + "load_cost_forecast": [i + 1 for i in range(48)], + "prod_price_forecast": [i + 1 for i in range(48)], } self.runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams self.params_json = json.dumps(params) - + # Test input data for actions (using data from file) def test_set_input_data_dict(self): - costfun = 'profit' + costfun = "profit" # Test dayahead - action = 'dayahead-optim' - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + action = "dayahead-optim" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) self.assertIsInstance(input_data_dict, dict) - self.assertTrue(input_data_dict['df_input_data'] == None) - self.assertIsInstance(input_data_dict['df_input_data_dayahead'], pd.DataFrame) - self.assertTrue(input_data_dict['df_input_data_dayahead'].index.freq is not None) - self.assertTrue(input_data_dict['df_input_data_dayahead'].isnull().sum().sum()==0) - self.assertTrue(input_data_dict['fcst'].optim_conf['weather_forecast_method']=='list') - self.assertTrue(input_data_dict['fcst'].optim_conf['load_forecast_method']=='list') - self.assertTrue(input_data_dict['fcst'].optim_conf['load_cost_forecast_method']=='list') - self.assertTrue(input_data_dict['fcst'].optim_conf['production_price_forecast_method']=='list') + self.assertTrue(input_data_dict["df_input_data"] == None) + self.assertIsInstance(input_data_dict["df_input_data_dayahead"], pd.DataFrame) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].index.freq is not None + ) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].isnull().sum().sum() == 0 + ) + self.assertTrue( + input_data_dict["fcst"].optim_conf["weather_forecast_method"] == "list" + ) + self.assertTrue( + 
input_data_dict["fcst"].optim_conf["load_forecast_method"] == "list" + ) + self.assertTrue( + input_data_dict["fcst"].optim_conf["load_cost_forecast_method"] == "list" + ) + self.assertTrue( + input_data_dict["fcst"].optim_conf["production_price_forecast_method"] + == "list" + ) # Test publish data - action = 'publish-data' - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) - self.assertTrue(input_data_dict['df_input_data'] == None) - self.assertTrue(input_data_dict['df_input_data_dayahead'] == None) - self.assertTrue(input_data_dict['P_PV_forecast'] == None) - self.assertTrue(input_data_dict['P_load_forecast'] == None) + action = "publish-data" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) + self.assertTrue(input_data_dict["df_input_data"] == None) + self.assertTrue(input_data_dict["df_input_data_dayahead"] == None) + self.assertTrue(input_data_dict["P_PV_forecast"] == None) + self.assertTrue(input_data_dict["P_load_forecast"] == None) # Test naive mpc - action = 'naive-mpc-optim' - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + action = "naive-mpc-optim" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) self.assertIsInstance(input_data_dict, dict) - self.assertIsInstance(input_data_dict['df_input_data_dayahead'], pd.DataFrame) - self.assertTrue(input_data_dict['df_input_data_dayahead'].index.freq is not None) - self.assertTrue(input_data_dict['df_input_data_dayahead'].isnull().sum().sum()==0) - self.assertTrue(len(input_data_dict['df_input_data_dayahead'])==10) # The default value for prediction_horizon + self.assertIsInstance(input_data_dict["df_input_data_dayahead"], pd.DataFrame) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].index.freq is not None + ) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].isnull().sum().sum() == 0 + ) + self.assertTrue( + len(input_data_dict["df_input_data_dayahead"]) == 10 + ) # The default value for prediction_horizon # Test Naive mpc with a shorter forecast = runtimeparams = { - 'pv_power_forecast':[1,2,3,4,5,6,7,8,9,10], - 'load_power_forecast':[1,2,3,4,5,6,7,8,9,10], - 'load_cost_forecast':[1,2,3,4,5,6,7,8,9,10], - 'prod_price_forecast':[1,2,3,4,5,6,7,8,9,10] + "pv_power_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "load_power_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "load_cost_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "prod_price_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], } runtimeparams_json = json.dumps(runtimeparams) params = copy.deepcopy(json.loads(self.params_json)) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) self.assertIsInstance(input_data_dict, dict) - self.assertIsInstance(input_data_dict['df_input_data_dayahead'], pd.DataFrame) - self.assertTrue(input_data_dict['df_input_data_dayahead'].index.freq is not None) - 
self.assertTrue(input_data_dict['df_input_data_dayahead'].isnull().sum().sum()==0) - self.assertTrue(len(input_data_dict['df_input_data_dayahead'])==10) # The default value for prediction_horizon + self.assertIsInstance(input_data_dict["df_input_data_dayahead"], pd.DataFrame) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].index.freq is not None + ) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].isnull().sum().sum() == 0 + ) + self.assertTrue( + len(input_data_dict["df_input_data_dayahead"]) == 10 + ) # The default value for prediction_horizon # Test naive mpc with a shorter forecast and prediction horizon = 10 - action = 'naive-mpc-optim' - runtimeparams['prediction_horizon'] = 10 + action = "naive-mpc-optim" + runtimeparams["prediction_horizon"] = 10 runtimeparams_json = json.dumps(runtimeparams) params = copy.deepcopy(json.loads(self.params_json)) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) self.assertIsInstance(input_data_dict, dict) - self.assertIsInstance(input_data_dict['df_input_data_dayahead'], pd.DataFrame) - self.assertTrue(input_data_dict['df_input_data_dayahead'].index.freq is not None) - self.assertTrue(input_data_dict['df_input_data_dayahead'].isnull().sum().sum()==0) - self.assertTrue(len(input_data_dict['df_input_data_dayahead'])==10) # The fixed value for prediction_horizon + self.assertIsInstance(input_data_dict["df_input_data_dayahead"], pd.DataFrame) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].index.freq is not None + ) + self.assertTrue( + input_data_dict["df_input_data_dayahead"].isnull().sum().sum() == 0 + ) + self.assertTrue( + len(input_data_dict["df_input_data_dayahead"]) == 10 + ) # The fixed value for prediction_horizon # Test passing just load cost and prod price as lists - action = 'dayahead-optim' + action = "dayahead-optim" params = TestCommandLineUtils.get_test_params() runtimeparams = { - 'load_cost_forecast':[i+1 for i in range(48)], - 'prod_price_forecast':[i+1 for i in range(48)] + "load_cost_forecast": [i + 1 for i in range(48)], + "prod_price_forecast": [i + 1 for i in range(48)], } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) - self.assertTrue(input_data_dict['fcst'].optim_conf['load_cost_forecast_method']=='list') - self.assertTrue(input_data_dict['fcst'].optim_conf['production_price_forecast_method']=='list') - + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) + self.assertTrue( + input_data_dict["fcst"].optim_conf["load_cost_forecast_method"] == "list" + ) + self.assertTrue( + input_data_dict["fcst"].optim_conf["production_price_forecast_method"] + == "list" + ) + # Test day-ahead optimization def test_webserver_get_injection_dict(self): - costfun = 'profit' - action = 'dayahead-optim' - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - 
action, logger, get_data_from_file=True) + costfun = "profit" + action = "dayahead-optim" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = dayahead_forecast_optim(input_data_dict, logger, debug=True) injection_dict = utils.get_injection_dict(opt_res) self.assertIsInstance(injection_dict, dict) - self.assertIsInstance(injection_dict['table1'], str) - self.assertIsInstance(injection_dict['table2'], str) - + self.assertIsInstance(injection_dict["table1"], str) + self.assertIsInstance(injection_dict["table2"], str) + # Test data formatting of dayahead optimization with load cost and prod price as lists def test_dayahead_forecast_optim(self): # Test dataframe output of profit dayahead optimization - costfun = 'profit' - action = 'dayahead-optim' + costfun = "profit" + action = "dayahead-optim" params = copy.deepcopy(json.loads(self.params_json)) - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = dayahead_forecast_optim(input_data_dict, logger, debug=True) self.assertIsInstance(opt_res, pd.DataFrame) - self.assertTrue(opt_res.isnull().sum().sum()==0) - self.assertTrue(len(opt_res)==len(params['passed_data']['pv_power_forecast'])) + self.assertTrue(opt_res.isnull().sum().sum() == 0) + self.assertTrue(len(opt_res) == len(params["passed_data"]["pv_power_forecast"])) # Test dayahead output, passing just load cost and prod price as runtime lists (costfun=profit) - action = 'dayahead-optim' + action = "dayahead-optim" params = TestCommandLineUtils.get_test_params() runtimeparams = { - 'load_cost_forecast':[i+1 for i in range(48)], - 'prod_price_forecast':[i+1 for i in range(48)] + "load_cost_forecast": [i + 1 for i in range(48)], + "prod_price_forecast": [i + 1 for i in range(48)], } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = dayahead_forecast_optim(input_data_dict, logger, debug=True) self.assertIsInstance(opt_res, pd.DataFrame) - self.assertTrue(opt_res.isnull().sum().sum()==0) - self.assertTrue(input_data_dict['fcst'].optim_conf['load_cost_forecast_method']=='list') - self.assertTrue(input_data_dict['fcst'].optim_conf['production_price_forecast_method']=='list') - self.assertEqual(opt_res['unit_load_cost'].values.tolist(), runtimeparams['load_cost_forecast']) - self.assertEqual(opt_res['unit_prod_price'].values.tolist(), runtimeparams['prod_price_forecast']) - + self.assertTrue(opt_res.isnull().sum().sum() == 0) + self.assertTrue( + input_data_dict["fcst"].optim_conf["load_cost_forecast_method"] == "list" + ) + self.assertTrue( + input_data_dict["fcst"].optim_conf["production_price_forecast_method"] + == "list" + ) + self.assertEqual( + opt_res["unit_load_cost"].values.tolist(), + runtimeparams["load_cost_forecast"], + ) + self.assertEqual( + opt_res["unit_prod_price"].values.tolist(), + 
runtimeparams["prod_price_forecast"], + ) + # Test dataframe outpit of perfect forecast optimization def test_perfect_forecast_optim(self): - costfun = 'profit' - action = 'perfect-optim' - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + costfun = "profit" + action = "perfect-optim" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = perfect_forecast_optim(input_data_dict, logger, debug=True) self.assertIsInstance(opt_res, pd.DataFrame) - self.assertTrue(opt_res.isnull().sum().sum()==0) + self.assertTrue(opt_res.isnull().sum().sum() == 0) self.assertIsInstance(opt_res.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_'+input_data_dict["costfun"] in opt_res.columns) - + self.assertIsInstance( + opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_" + input_data_dict["costfun"] in opt_res.columns) + # Test naive mpc optimization def test_naive_mpc_optim(self): # Test mpc optimization - costfun = 'profit' - action = 'naive-mpc-optim' + costfun = "profit" + action = "naive-mpc-optim" params = copy.deepcopy(json.loads(self.params_json)) - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = naive_mpc_optim(input_data_dict, logger, debug=True) self.assertIsInstance(opt_res, pd.DataFrame) - self.assertTrue(opt_res.isnull().sum().sum()==0) - self.assertTrue(len(opt_res)==10) + self.assertTrue(opt_res.isnull().sum().sum() == 0) + self.assertTrue(len(opt_res) == 10) # Test mpc optimization with runtime parameters similar to the documentation - runtimeparams = {"pv_power_forecast": - [1,2,3,4,5,6,7,8,9,10], - "prediction_horizon":10, "soc_init":0.5,"soc_final":0.6,'operating_hours_of_each_deferrable_load':[1,3],'start_timesteps_of_each_deferrable_load':[-3,0],'end_timesteps_of_each_deferrable_load':[8,0]} + runtimeparams = { + "pv_power_forecast": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "prediction_horizon": 10, + "soc_init": 0.5, + "soc_final": 0.6, + "operating_hours_of_each_deferrable_load": [1, 3], + "start_timesteps_of_each_deferrable_load": [-3, 0], + "end_timesteps_of_each_deferrable_load": [8, 0], + } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams - params['optim_conf']['weather_forecast_method'] = 'list' - params['optim_conf']['load_forecast_method'] = 'naive' - params['optim_conf']['load_cost_forecast_method'] = 'hp_hc_periods' - params['optim_conf']['production_price_forecast_method'] = 'constant' + params["passed_data"] = runtimeparams + params["optim_conf"]["weather_forecast_method"] = "list" + params["optim_conf"]["load_forecast_method"] = "naive" + params["optim_conf"]["load_cost_forecast_method"] = "hp_hc_periods" + params["optim_conf"]["production_price_forecast_method"] = "constant" params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + 
params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = naive_mpc_optim(input_data_dict, logger, debug=True) self.assertIsInstance(opt_res, pd.DataFrame) - self.assertTrue(opt_res.isnull().sum().sum()==0) - self.assertTrue(len(opt_res)==10) + self.assertTrue(opt_res.isnull().sum().sum() == 0) + self.assertTrue(len(opt_res) == 10) # Test publish after passing the forecast as list # with method_ts_round=first - costfun = 'profit' - action = 'naive-mpc-optim' + costfun = "profit" + action = "naive-mpc-optim" params = copy.deepcopy(json.loads(self.params_json)) - params['retrieve_hass_conf']['method_ts_round'] = 'first' + params["retrieve_hass_conf"]["method_ts_round"] = "first" params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = naive_mpc_optim(input_data_dict, logger, debug=True) - action = 'publish-data' - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, None, - action, logger, get_data_from_file=True) + action = "publish-data" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + None, + action, + logger, + get_data_from_file=True, + ) opt_res_first = publish_data(input_data_dict, logger, opt_res_latest=opt_res) - self.assertTrue(len(opt_res_first)==1) + self.assertTrue(len(opt_res_first) == 1) # test mpc and publish with method_ts_round=last and set_use_battery=true - action = 'naive-mpc-optim' + action = "naive-mpc-optim" params = copy.deepcopy(json.loads(self.params_json)) - params['retrieve_hass_conf']['method_ts_round'] = 'last' - params['optim_conf']['set_use_battery'] = True + params["retrieve_hass_conf"]["method_ts_round"] = "last" + params["optim_conf"]["set_use_battery"] = True params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) opt_res = naive_mpc_optim(input_data_dict, logger, debug=True) - action = 'publish-data' - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, None, - action, logger, get_data_from_file=True) + action = "publish-data" + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + None, + action, + logger, + get_data_from_file=True, + ) opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res) - self.assertTrue(len(opt_res_last)==1) - # Reproduce when trying to publish data params=None and runtimeparams=None - # action = 'publish-data' - # input_data_dict = set_input_data_dict(emhass_conf, costfun, None, None, - # action, logger, get_data_from_file=True) - # opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res) - # self.assertTrue(len(opt_res_last)==1) + self.assertTrue(len(opt_res_last) == 1) + # Reproduce when trying to publish data params=None and runtimeparams=None + # action = 'publish-data' + # input_data_dict = set_input_data_dict(emhass_conf, costfun, None, None, + # action, logger, get_data_from_file=True) + # opt_res_last = publish_data(input_data_dict, logger, opt_res_latest=opt_res) + # 
self.assertTrue(len(opt_res_last)==1) # Check if status is published from datetime import datetime - now_precise = datetime.now(input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0) - idx_closest = opt_res.index.get_indexer([now_precise], method='nearest')[0] - custom_cost_fun_id = {"entity_id": "sensor.optim_status", "unit_of_measurement": "", "friendly_name": "EMHASS optimization status"} + + now_precise = datetime.now( + input_data_dict["retrieve_hass_conf"]["time_zone"] + ).replace(second=0, microsecond=0) + idx_closest = opt_res.index.get_indexer([now_precise], method="nearest")[0] + custom_cost_fun_id = { + "entity_id": "sensor.optim_status", + "unit_of_measurement": "", + "friendly_name": "EMHASS optimization status", + } publish_prefix = "" - response, data = input_data_dict['rh'].post_data(opt_res['optim_status'], idx_closest, - custom_cost_fun_id["entity_id"], - custom_cost_fun_id["unit_of_measurement"], - custom_cost_fun_id["friendly_name"], - type_var = 'optim_status', - publish_prefix = publish_prefix) - self.assertTrue(hasattr(response, '__class__')) - self.assertTrue(data['attributes']['friendly_name'] == 'EMHASS optimization status') - + response, data = input_data_dict["rh"].post_data( + opt_res["optim_status"], + idx_closest, + custom_cost_fun_id["entity_id"], + custom_cost_fun_id["unit_of_measurement"], + custom_cost_fun_id["friendly_name"], + type_var="optim_status", + publish_prefix=publish_prefix, + ) + self.assertTrue(hasattr(response, "__class__")) + self.assertTrue( + data["attributes"]["friendly_name"] == "EMHASS optimization status" + ) + # Test outputs of fit, predict and tune def test_forecast_model_fit_predict_tune(self): - costfun = 'profit' - action = 'forecast-model-fit' # fit, predict and tune methods + costfun = "profit" + action = "forecast-model-fit" # fit, predict and tune methods params = TestCommandLineUtils.get_test_params() runtimeparams = { - 'historic_days_to_retrieve': 20, + "historic_days_to_retrieve": 20, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", "num_lags": 48, - "split_date_delta": '48h', + "split_date_delta": "48h", "perform_backtest": False, "model_predict_publish": True, "model_predict_entity_id": "sensor.p_load_forecast_knn", "model_predict_unit_of_measurement": "W", - "model_predict_friendly_name": "Load Power Forecast KNN regressor" + "model_predict_friendly_name": "Load Power Forecast KNN regressor", } runtimeparams_json = json.dumps(runtimeparams) - params['optim_conf']['load_forecast_method'] = 'skforecast' + params["optim_conf"]["load_forecast_method"] = "skforecast" params_json = json.dumps(params) - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) - self.assertTrue(input_data_dict['params']['passed_data']['model_type'] == 'load_forecast') - self.assertTrue(input_data_dict['params']['passed_data']['sklearn_model'] == 'KNeighborsRegressor') - self.assertTrue(input_data_dict['params']['passed_data']['perform_backtest'] == False) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) + self.assertTrue( + input_data_dict["params"]["passed_data"]["model_type"] == "load_forecast" + ) + self.assertTrue( + input_data_dict["params"]["passed_data"]["sklearn_model"] + == "KNeighborsRegressor" + ) + self.assertTrue( + 
input_data_dict["params"]["passed_data"]["perform_backtest"] == False + ) # Check that the default params are loaded - input_data_dict = set_input_data_dict(emhass_conf, costfun, self.params_json, self.runtimeparams_json, - action, logger, get_data_from_file=True) - self.assertTrue(input_data_dict['params']['passed_data']['model_type'] == 'load_forecast') - self.assertTrue(input_data_dict['params']['passed_data']['sklearn_model'] == 'KNeighborsRegressor') - self.assertIsInstance(input_data_dict['df_input_data'], pd.DataFrame) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + self.params_json, + self.runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) + self.assertTrue( + input_data_dict["params"]["passed_data"]["model_type"] == "load_forecast" + ) + self.assertTrue( + input_data_dict["params"]["passed_data"]["sklearn_model"] + == "KNeighborsRegressor" + ) + self.assertIsInstance(input_data_dict["df_input_data"], pd.DataFrame) # Test the fit method - df_fit_pred, df_fit_pred_backtest, mlf = forecast_model_fit(input_data_dict, logger, debug=True) + df_fit_pred, df_fit_pred_backtest, mlf = forecast_model_fit( + input_data_dict, logger, debug=True + ) self.assertIsInstance(df_fit_pred, pd.DataFrame) self.assertTrue(df_fit_pred_backtest == None) # Test ijection_dict for fit method on webui injection_dict = utils.get_injection_dict_forecast_model_fit(df_fit_pred, mlf) self.assertIsInstance(injection_dict, dict) - self.assertIsInstance(injection_dict['figure_0'], str) + self.assertIsInstance(injection_dict["figure_0"], str) # Test the predict method on observations following the train period - input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) - df_pred = forecast_model_predict(input_data_dict, logger, use_last_window=False, debug=True, mlf=mlf) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) + df_pred = forecast_model_predict( + input_data_dict, logger, use_last_window=False, debug=True, mlf=mlf + ) self.assertIsInstance(df_pred, pd.Series) self.assertTrue(df_pred.isnull().sum().sum() == 0) # Now a predict using last_window @@ -425,17 +647,38 @@ def test_regressor_model_fit_predict(self): regressor_model_predict(input_data_dict, logger, debug=True, mlr=mlr) - # CLI test action that does not exist - @patch('sys.argv', ['main', '--action', 'test', '--config', str(emhass_conf['config_path']), - '--debug', 'True']) + @patch( + "sys.argv", + [ + "main", + "--action", + "test", + "--config", + str(emhass_conf["config_path"]), + "--debug", + "True", + ], + ) def test_main_wrong_action(self): opt_res = main() self.assertEqual(opt_res, None) - - # CLI test action perfect-optim action - @patch('sys.argv', ['main', '--action', 'perfect-optim', '--config', str(emhass_conf['config_path']), - '--debug', 'True', '--params', json.dumps(get_test_params())]) + + # CLI test action perfect-optim action + @patch( + "sys.argv", + [ + "main", + "--action", + "perfect-optim", + "--config", + str(emhass_conf["config_path"]), + "--debug", + "True", + "--params", + json.dumps(get_test_params()), + ], + ) def test_main_perfect_forecast_optim(self): opt_res = main() self.assertIsInstance(opt_res, pd.DataFrame) @@ -448,18 +691,44 @@ def test_main_perfect_forecast_optim(self): # CLI test dayahead forecast optimzation action def test_main_dayahead_forecast_optim(self): - with patch('sys.argv', 
['main', '--action', 'dayahead-optim', '--config', str(emhass_conf['config_path']), - '--params', self.params_json, '--runtimeparams', self.runtimeparams_json, - '--debug', 'True']): + with patch( + "sys.argv", + [ + "main", + "--action", + "dayahead-optim", + "--config", + str(emhass_conf["config_path"]), + "--params", + self.params_json, + "--runtimeparams", + self.runtimeparams_json, + "--debug", + "True", + ], + ): opt_res = main() self.assertIsInstance(opt_res, pd.DataFrame) self.assertTrue(opt_res.isnull().sum().sum() == 0) # CLI test naive mpc optimzation action def test_main_naive_mpc_optim(self): - with patch('sys.argv', ['main', '--action', 'naive-mpc-optim', '--config', str(emhass_conf['config_path']), - '--params', self.params_json, '--runtimeparams', self.runtimeparams_json, - '--debug', 'True']): + with patch( + "sys.argv", + [ + "main", + "--action", + "naive-mpc-optim", + "--config", + str(emhass_conf["config_path"]), + "--params", + self.params_json, + "--runtimeparams", + self.runtimeparams_json, + "--debug", + "True", + ], + ): opt_res = main() self.assertIsInstance(opt_res, pd.DataFrame) self.assertTrue(opt_res.isnull().sum().sum() == 0) @@ -469,30 +738,43 @@ def test_main_naive_mpc_optim(self): def test_main_forecast_model_fit(self): params = copy.deepcopy(json.loads(self.params_json)) runtimeparams = { - 'historic_days_to_retrieve': 20, + "historic_days_to_retrieve": 20, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", "num_lags": 48, - "split_date_delta": '48h', - "perform_backtest": False + "split_date_delta": "48h", + "perform_backtest": False, } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams - params['optim_conf']['load_forecast_method'] = 'skforecast' + params["passed_data"] = runtimeparams + params["optim_conf"]["load_forecast_method"] = "skforecast" params_json = json.dumps(params) - with patch('sys.argv', ['main', '--action', 'forecast-model-fit', '--config', str(emhass_conf['config_path']), - '--params', params_json, '--runtimeparams', runtimeparams_json, - '--debug', 'True']): + with patch( + "sys.argv", + [ + "main", + "--action", + "forecast-model-fit", + "--config", + str(emhass_conf["config_path"]), + "--params", + params_json, + "--runtimeparams", + runtimeparams_json, + "--debug", + "True", + ], + ): df_fit_pred, df_fit_pred_backtest, mlf = main() self.assertIsInstance(df_fit_pred, pd.DataFrame) self.assertTrue(df_fit_pred_backtest == None) - + # CLI test forecast model predict action def test_main_forecast_model_predict(self): params = copy.deepcopy(json.loads(self.params_json)) runtimeparams = { - 'historic_days_to_retrieve': 20, + "historic_days_to_retrieve": 20, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", @@ -502,11 +784,24 @@ def test_main_forecast_model_predict(self): } runtimeparams_json = json.dumps(runtimeparams) params["passed_data"] = runtimeparams - params["optim_conf"]['load_forecast_method'] = "skforecast" + params["optim_conf"]["load_forecast_method"] = "skforecast" params_json = json.dumps(params) - with patch('sys.argv', ['main', '--action', 'forecast-model-predict', '--config', str(emhass_conf['config_path']), - '--params', params_json, '--runtimeparams', runtimeparams_json, - '--debug', 'True']): + with patch( + "sys.argv", + [ + "main", + "--action", + "forecast-model-predict", + "--config", + str(emhass_conf["config_path"]), + "--params", + 
params_json, + "--runtimeparams", + runtimeparams_json, + "--debug", + "True", + ], + ): df_pred = main() self.assertIsInstance(df_pred, pd.Series) self.assertTrue(df_pred.isnull().sum().sum() == 0) @@ -515,7 +810,7 @@ def test_main_forecast_model_predict(self): def test_main_forecast_model_tune(self): params = copy.deepcopy(json.loads(self.params_json)) runtimeparams = { - 'historic_days_to_retrieve': 20, + "historic_days_to_retrieve": 20, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", @@ -525,11 +820,24 @@ def test_main_forecast_model_tune(self): } runtimeparams_json = json.dumps(runtimeparams) params["passed_data"] = runtimeparams - params["optim_conf"]['load_forecast_method'] = "skforecast" + params["optim_conf"]["load_forecast_method"] = "skforecast" params_json = json.dumps(params) - with patch('sys.argv', ['main', '--action', 'forecast-model-tune', '--config', str(emhass_conf['config_path']), - '--params', params_json, '--runtimeparams', runtimeparams_json, - '--debug', 'True']): + with patch( + "sys.argv", + [ + "main", + "--action", + "forecast-model-tune", + "--config", + str(emhass_conf["config_path"]), + "--params", + params_json, + "--runtimeparams", + runtimeparams_json, + "--debug", + "True", + ], + ): df_pred_optim, mlf = main() self.assertIsInstance(df_pred_optim, pd.DataFrame) self.assertTrue(mlf.is_tuned == True) @@ -582,7 +890,7 @@ def test_main_regressor_model_predict(self): } runtimeparams_json = json.dumps(runtimeparams) params["passed_data"] = runtimeparams - params["optim_conf"]['load_forecast_method'] = "skforecast" + params["optim_conf"]["load_forecast_method"] = "skforecast" params_json = json.dumps(params) with patch( "sys.argv", @@ -602,15 +910,26 @@ def test_main_regressor_model_predict(self): ): prediction = main() self.assertIsInstance(prediction, np.ndarray) - + # CLI test publish data action - @patch('sys.argv', ['main', '--action', 'publish-data', '--config', str(emhass_conf['config_path']), - '--debug', 'True']) + @patch( + "sys.argv", + [ + "main", + "--action", + "publish-data", + "--config", + str(emhass_conf["config_path"]), + "--debug", + "True", + ], + ) def test_main_publish_data(self): opt_res = main() self.assertFalse(opt_res.empty) - -if __name__ == '__main__': + + +if __name__ == "__main__": unittest.main() ch.close() logger.removeHandler(ch) diff --git a/tests/test_forecast.py b/tests/test_forecast.py index 2f7b2fd3..38a370c5 100644 --- a/tests/test_forecast.py +++ b/tests/test_forecast.py @@ -1,554 +1,930 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import unittest +import _pickle as cPickle +import bz2 +import copy +import json import os -import requests_mock +import pathlib +import pickle +import unittest + import pandas as pd -import pathlib, pickle, json, copy -import bz2 -import _pickle as cPickle +import requests_mock -from emhass.retrieve_hass import RetrieveHass +from emhass import utils from emhass.command_line import set_input_data_dict -from emhass.machine_learning_forecaster import MLForecaster from emhass.forecast import Forecast +from emhass.machine_learning_forecaster import MLForecaster from emhass.optimization import Optimization -from emhass import utils +from emhass.retrieve_hass import RetrieveHass # The root folder root = pathlib.Path(utils.get_root(__file__, num_parent=2)) # Build emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['defaults_path'] = 
emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False) + class TestForecast(unittest.TestCase): - @staticmethod def get_test_params(): params = {} # Build params with default config and secrets - if emhass_conf['defaults_path'].exists(): - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) - params = utils.build_params(emhass_conf,secrets,config,logger) + if emhass_conf["defaults_path"].exists(): + config = utils.build_config( + emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) + params = utils.build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) return params def setUp(self): self.get_data_from_file = True params = json.dumps(TestForecast.get_test_params()) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) - self.retrieve_hass_conf, self.optim_conf, self.plant_conf = \ - retrieve_hass_conf, optim_conf, plant_conf + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params, logger + ) + self.retrieve_hass_conf, self.optim_conf, self.plant_conf = ( + retrieve_hass_conf, + optim_conf, + plant_conf, + ) # Create RetrieveHass object - self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + self.rh = RetrieveHass( + self.retrieve_hass_conf["hass_url"], + self.retrieve_hass_conf["long_lived_token"], + self.retrieve_hass_conf["optimization_time_step"], + self.retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) # Obtain sensor values from saved file if self.get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: self.rh.df_final, self.days_list, self.var_list = pickle.load(inp) - self.retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0]) - self.retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1]) - self.retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - self.retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = str( + self.var_list[0] + ) + self.retrieve_hass_conf["sensor_power_photovoltaics"] = str( + self.var_list[1] + ) + self.retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + self.retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] # 
Else obtain sensor values from HA else: - self.days_list = utils.get_days_list(self.retrieve_hass_conf['historic_days_to_retrieve']) - self.var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads'], self.retrieve_hass_conf['sensor_power_photovoltaics']] - self.rh.get_data(self.days_list, self.var_list, - minimal_response=False, significant_changes_only=False) + self.days_list = utils.get_days_list( + self.retrieve_hass_conf["historic_days_to_retrieve"] + ) + self.var_list = [ + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + self.retrieve_hass_conf["sensor_power_photovoltaics"], + ] + self.rh.get_data( + self.days_list, + self.var_list, + minimal_response=False, + significant_changes_only=False, + ) # Prepare data for optimization - self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'], - set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], - var_interp = self.retrieve_hass_conf['sensor_linear_interp']) + self.rh.prepare_data( + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=self.retrieve_hass_conf["load_negative"], + set_zero_min=self.retrieve_hass_conf["set_zero_min"], + var_replace_zero=self.retrieve_hass_conf["sensor_replace_zero"], + var_interp=self.retrieve_hass_conf["sensor_linear_interp"], + ) self.df_input_data = self.rh.df_final.copy() # Create forecast Object - self.fcst = Forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - params, emhass_conf, logger, get_data_from_file=self.get_data_from_file) + self.fcst = Forecast( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=self.get_data_from_file, + ) # The default for test is csv read - self.df_weather_scrap = self.fcst.get_weather_forecast(method='csv') + self.df_weather_scrap = self.fcst.get_weather_forecast(method="csv") self.P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather_scrap) - self.P_load_forecast = self.fcst.get_load_forecast(method=optim_conf['load_forecast_method']) - self.df_input_data_dayahead = pd.concat([self.P_PV_forecast, self.P_load_forecast], axis=1) - self.df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - 'profit', emhass_conf, logger) + self.P_load_forecast = self.fcst.get_load_forecast( + method=optim_conf["load_forecast_method"] + ) + self.df_input_data_dayahead = pd.concat( + [self.P_PV_forecast, self.P_load_forecast], axis=1 + ) + self.df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + "profit", + emhass_conf, + logger, + ) # Manually create input data (from formatted parameter) dictionary self.input_data_dict = { - 'emhass_conf': emhass_conf, - 'retrieve_hass_conf': self.retrieve_hass_conf, - 'df_input_data': self.df_input_data, - 'df_input_data_dayahead': self.df_input_data_dayahead, - 'opt': self.opt, - 'rh': self.rh, - 'fcst': self.fcst, - 'P_PV_forecast': self.P_PV_forecast, - 'P_load_forecast': self.P_load_forecast, - 'params': params + "emhass_conf": emhass_conf, + "retrieve_hass_conf": self.retrieve_hass_conf, + "df_input_data": self.df_input_data, + 
"df_input_data_dayahead": self.df_input_data_dayahead, + "opt": self.opt, + "rh": self.rh, + "fcst": self.fcst, + "P_PV_forecast": self.P_PV_forecast, + "P_load_forecast": self.P_load_forecast, + "params": params, } - - - # Test weather forecast dataframe output based on saved csv file + + # Test weather forecast dataframe output based on saved csv file def test_get_weather_forecast_csv(self): # Test dataframe from get weather forecast - self.df_weather_csv = self.fcst.get_weather_forecast(method='csv') - self.assertEqual(self.fcst.weather_forecast_method, 'csv') + self.df_weather_csv = self.fcst.get_weather_forecast(method="csv") + self.assertEqual(self.fcst.weather_forecast_method, "csv") self.assertIsInstance(self.df_weather_csv, type(pd.DataFrame())) - self.assertIsInstance(self.df_weather_csv.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.df_weather_csv.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + self.df_weather_csv.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.df_weather_csv.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(self.df_weather_csv.index.tz, self.fcst.time_zone) - self.assertTrue(self.fcst.start_forecast < ts for ts in self.df_weather_csv.index) - self.assertEqual(len(self.df_weather_csv), - int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep)) + self.assertTrue( + self.fcst.start_forecast < ts for ts in self.df_weather_csv.index + ) + self.assertEqual( + len(self.df_weather_csv), + int( + self.optim_conf["delta_forecast_daily"].total_seconds() + / 3600 + / self.fcst.timeStep + ), + ) # Test dataframe from get power from weather P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather_csv) self.assertIsInstance(P_PV_forecast, pd.core.series.Series) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.df_weather_csv), len(P_PV_forecast)) - df_weather_none = self.fcst.get_weather_forecast(method='none') + df_weather_none = self.fcst.get_weather_forecast(method="none") self.assertTrue(df_weather_none == None) - + # Test output weather forecast using scrapper with mock get request data def test_get_weather_forecast_scrapper_method_mock(self): with requests_mock.mock() as m: - data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_scrapper_get_method.pbz2'), "rb") + data = bz2.BZ2File( + str( + emhass_conf["data_path"] / "test_response_scrapper_get_method.pbz2" + ), + "rb", + ) data = cPickle.load(data) - get_url = "https://clearoutside.com/forecast/"+str(round(self.fcst.lat, 2))+"/"+str(round(self.fcst.lon, 2))+"?desktop=true" + get_url = ( + "https://clearoutside.com/forecast/" + + str(round(self.fcst.lat, 2)) + + "/" + + str(round(self.fcst.lon, 2)) + + "?desktop=true" + ) m.get(get_url, content=data) # Test dataframe output from get weather forecast - df_weather_scrap = self.fcst.get_weather_forecast(method='scrapper') + df_weather_scrap = self.fcst.get_weather_forecast(method="scrapper") self.assertIsInstance(df_weather_scrap, type(pd.DataFrame())) - self.assertIsInstance(df_weather_scrap.index, 
pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(df_weather_scrap.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + df_weather_scrap.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + df_weather_scrap.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(df_weather_scrap.index.tz, self.fcst.time_zone) - self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_scrap.index) - self.assertEqual(len(df_weather_scrap), - int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep)) + self.assertTrue( + self.fcst.start_forecast < ts for ts in df_weather_scrap.index + ) + self.assertEqual( + len(df_weather_scrap), + int( + self.optim_conf["delta_forecast_daily"].total_seconds() + / 3600 + / self.fcst.timeStep + ), + ) # Test dataframe output from get power from weather forecast P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap) self.assertIsInstance(P_PV_forecast, pd.core.series.Series) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(df_weather_scrap), len(P_PV_forecast)) # Test dataframe output from get power from weather forecast (with 2 PV plant's) - self.plant_conf['pv_module_model'] = [self.plant_conf['pv_module_model'][0], self.plant_conf['pv_module_model'][0]] - self.plant_conf['pv_inverter_model'] = [self.plant_conf['pv_inverter_model'][0], self.plant_conf['pv_inverter_model'][0]] - self.plant_conf['surface_tilt'] = [30, 45] - self.plant_conf['surface_azimuth'] = [270, 90] - self.plant_conf['modules_per_string'] = [8, 8] - self.plant_conf['strings_per_inverter'] = [1, 1] + self.plant_conf["pv_module_model"] = [ + self.plant_conf["pv_module_model"][0], + self.plant_conf["pv_module_model"][0], + ] + self.plant_conf["pv_inverter_model"] = [ + self.plant_conf["pv_inverter_model"][0], + self.plant_conf["pv_inverter_model"][0], + ] + self.plant_conf["surface_tilt"] = [30, 45] + self.plant_conf["surface_azimuth"] = [270, 90] + self.plant_conf["modules_per_string"] = [8, 8] + self.plant_conf["strings_per_inverter"] = [1, 1] P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap) self.assertIsInstance(P_PV_forecast, pd.core.series.Series) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(df_weather_scrap), len(P_PV_forecast)) - - # Test output weather forecast using Solcast with mock get request data + + # Test output weather forecast using Solcast with mock get request data def test_get_weather_forecast_solcast_method_mock(self): - self.fcst.params = {'passed_data': {'weather_forecast_cache': False, 'weather_forecast_cache_only': False}} - self.fcst.retrieve_hass_conf['solcast_api_key'] = "123456" - self.fcst.retrieve_hass_conf['solcast_rooftop_id'] = "123456" - if 
os.path.isfile(emhass_conf['data_path'] / "weather_forecast_data.pkl"): - os.rename(emhass_conf['data_path'] / "weather_forecast_data.pkl", emhass_conf['data_path'] / "temp_weather_forecast_data.pkl") + self.fcst.params = { + "passed_data": { + "weather_forecast_cache": False, + "weather_forecast_cache_only": False, + } + } + self.fcst.retrieve_hass_conf["solcast_api_key"] = "123456" + self.fcst.retrieve_hass_conf["solcast_rooftop_id"] = "123456" + if os.path.isfile(emhass_conf["data_path"] / "weather_forecast_data.pkl"): + os.rename( + emhass_conf["data_path"] / "weather_forecast_data.pkl", + emhass_conf["data_path"] / "temp_weather_forecast_data.pkl", + ) with requests_mock.mock() as m: - data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_solcast_get_method.pbz2'), "rb") + data = bz2.BZ2File( + str(emhass_conf["data_path"] / "test_response_solcast_get_method.pbz2"), + "rb", + ) data = cPickle.load(data) - get_url = "https://api.solcast.com.au/rooftop_sites/123456/forecasts?hours=24" + get_url = ( + "https://api.solcast.com.au/rooftop_sites/123456/forecasts?hours=24" + ) m.get(get_url, json=data.json()) - df_weather_scrap = self.fcst.get_weather_forecast(method='solcast') + df_weather_scrap = self.fcst.get_weather_forecast(method="solcast") self.assertIsInstance(df_weather_scrap, type(pd.DataFrame())) - self.assertIsInstance(df_weather_scrap.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(df_weather_scrap.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + df_weather_scrap.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + df_weather_scrap.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(df_weather_scrap.index.tz, self.fcst.time_zone) - self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_scrap.index) - self.assertEqual(len(df_weather_scrap), - int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep)) - if os.path.isfile(emhass_conf['data_path'] / "temp_weather_forecast_data.pkl"): - os.rename(emhass_conf['data_path'] / "temp_weather_forecast_data.pkl", emhass_conf['data_path'] / "weather_forecast_data.pkl") - + self.assertTrue( + self.fcst.start_forecast < ts for ts in df_weather_scrap.index + ) + self.assertEqual( + len(df_weather_scrap), + int( + self.optim_conf["delta_forecast_daily"].total_seconds() + / 3600 + / self.fcst.timeStep + ), + ) + if os.path.isfile( + emhass_conf["data_path"] / "temp_weather_forecast_data.pkl" + ): + os.rename( + emhass_conf["data_path"] / "temp_weather_forecast_data.pkl", + emhass_conf["data_path"] / "weather_forecast_data.pkl", + ) + # Test output weather forecast using Forecast.Solar with mock get request data def test_get_weather_forecast_solarforecast_method_mock(self): with requests_mock.mock() as m: - data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_solarforecast_get_method.pbz2'), "rb") + data = bz2.BZ2File( + str( + emhass_conf["data_path"] + / "test_response_solarforecast_get_method.pbz2" + ), + "rb", + ) data = cPickle.load(data) - for i in range(len(self.plant_conf['pv_module_model'])): - get_url = "https://api.forecast.solar/estimate/"+str(round(self.fcst.lat, 2))+"/"+str(round(self.fcst.lon, 2))+\ - "/"+str(self.plant_conf['surface_tilt'][i])+"/"+str(self.plant_conf['surface_azimuth'][i]-180)+\ - "/"+str(5) + for i in range(len(self.plant_conf["pv_module_model"])): + get_url = ( + "https://api.forecast.solar/estimate/" + + str(round(self.fcst.lat, 2)) + + "/" + + 
str(round(self.fcst.lon, 2)) + + "/" + + str(self.plant_conf["surface_tilt"][i]) + + "/" + + str(self.plant_conf["surface_azimuth"][i] - 180) + + "/" + + str(5) + ) m.get(get_url, json=data) - df_weather_solarforecast = self.fcst.get_weather_forecast(method='solar.forecast') + df_weather_solarforecast = self.fcst.get_weather_forecast( + method="solar.forecast" + ) self.assertIsInstance(df_weather_solarforecast, type(pd.DataFrame())) - self.assertIsInstance(df_weather_solarforecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(df_weather_solarforecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + df_weather_solarforecast.index, + pd.core.indexes.datetimes.DatetimeIndex, + ) + self.assertIsInstance( + df_weather_solarforecast.index.dtype, + pd.core.dtypes.dtypes.DatetimeTZDtype, + ) self.assertEqual(df_weather_solarforecast.index.tz, self.fcst.time_zone) - self.assertTrue(self.fcst.start_forecast < ts for ts in df_weather_solarforecast.index) - self.assertEqual(len(df_weather_solarforecast), - int(self.optim_conf['delta_forecast_daily'].total_seconds()/3600/self.fcst.timeStep)) + self.assertTrue( + self.fcst.start_forecast < ts + for ts in df_weather_solarforecast.index + ) + self.assertEqual( + len(df_weather_solarforecast), + int( + self.optim_conf["delta_forecast_daily"].total_seconds() + / 3600 + / self.fcst.timeStep + ), + ) - # Test output weather forecast using passed runtime lists + # Test output weather forecast using passed runtime lists def test_get_forecasts_with_lists(self): # Load default params params = {} - if emhass_conf['defaults_path'].exists(): - with emhass_conf['defaults_path'].open('r') as data: + if emhass_conf["defaults_path"].exists(): + with emhass_conf["defaults_path"].open("r") as data: defaults = json.load(data) - updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger) + updated_emhass_conf, built_secrets = utils.build_secrets( + emhass_conf, logger + ) emhass_conf.update(updated_emhass_conf) - params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger)) + params.update( + utils.build_params(emhass_conf, built_secrets, defaults, logger) + ) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. 
does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) # Create 48 (1 day of data) long lists runtime forecasts parameters runtimeparams = { - 'pv_power_forecast':[i+1 for i in range(48)], - 'load_power_forecast':[i+1 for i in range(48)], - 'load_cost_forecast':[i+1 for i in range(48)], - 'prod_price_forecast':[i+1 for i in range(48)] + "pv_power_forecast": [i + 1 for i in range(48)], + "load_power_forecast": [i + 1 for i in range(48)], + "load_cost_forecast": [i + 1 for i in range(48)], + "prod_price_forecast": [i + 1 for i in range(48)], } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params_json,logger) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params_json, logger + ) set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams_json, params_json, retrieve_hass_conf, - optim_conf, plant_conf, set_type, logger) + runtimeparams_json, + params_json, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) # Build RetrieveHass Object - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) # Obtain sensor values from saved file if self.get_data_from_file: - with open((emhass_conf['data_path'] / 'test_df_final.pkl'), 'rb') as inp: + with open((emhass_conf["data_path"] / "test_df_final.pkl"), "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(self.var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(self.var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] # Else obtain sensor values from HA else: - days_list = utils.get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) + days_list = utils.get_days_list( + retrieve_hass_conf["historic_days_to_retrieve"] + ) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, + var_list, + minimal_response=False, + significant_changes_only=False, + ) # Prepare data for optimization - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], 
load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() # Build Forecast Object - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=True) + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=True, + ) # Obtain only 48 rows of data and remove last column for input df_input_data = copy.deepcopy(df_input_data).iloc[-49:-1] # Get Weather forecast with list, check dataframe output - P_PV_forecast = fcst.get_weather_forecast(method='list') + P_PV_forecast = fcst.get_weather_forecast(method="list") df_input_data.index = P_PV_forecast.index df_input_data.index.freq = rh.df_final.index.freq self.assertIsInstance(P_PV_forecast, type(pd.DataFrame())) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, fcst.time_zone) self.assertTrue(fcst.start_forecast < ts for ts in P_PV_forecast.index) self.assertTrue(P_PV_forecast.values[0][0] == 1) self.assertTrue(P_PV_forecast.values[-1][0] == 48) # Get load forecast with list, check dataframe output - P_load_forecast = fcst.get_load_forecast(method='list') + P_load_forecast = fcst.get_load_forecast(method="list") self.assertIsInstance(P_load_forecast, pd.core.series.Series) - self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_load_forecast.index.tz, fcst.time_zone) self.assertEqual(len(P_PV_forecast), len(P_load_forecast)) self.assertTrue(P_load_forecast.values[0] == 1) self.assertTrue(P_load_forecast.values[-1] == 48) # Get load cost forecast with list, check dataframe output - df_input_data = fcst.get_load_cost_forecast(df_input_data, method='list') + df_input_data = fcst.get_load_cost_forecast(df_input_data, method="list") self.assertTrue(fcst.var_load_cost in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - self.assertTrue(df_input_data['unit_load_cost'].values[0] == 1) - self.assertTrue(df_input_data['unit_load_cost'].values[-1] == 48) + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + self.assertTrue(df_input_data["unit_load_cost"].values[0] == 1) + self.assertTrue(df_input_data["unit_load_cost"].values[-1] == 48) # Get production price forecast with list, check dataframe output - df_input_data = fcst.get_prod_price_forecast(df_input_data, method='list') + df_input_data = 
fcst.get_prod_price_forecast(df_input_data, method="list") self.assertTrue(fcst.var_prod_price in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - self.assertTrue(df_input_data['unit_prod_price'].values[0] == 1) - self.assertTrue(df_input_data['unit_prod_price'].values[-1] == 48) - - # Test output weather forecast using longer passed runtime lists + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + self.assertTrue(df_input_data["unit_prod_price"].values[0] == 1) + self.assertTrue(df_input_data["unit_prod_price"].values[-1] == 48) + + # Test output weather forecast using longer passed runtime lists def test_get_forecasts_with_longer_lists(self): # Load default params params = {} set_type = "dayahead-optim" - if emhass_conf['defaults_path'].exists(): - with emhass_conf['defaults_path'].open('r') as data: + if emhass_conf["defaults_path"].exists(): + with emhass_conf["defaults_path"].open("r") as data: defaults = json.load(data) - updated_emhass_conf, built_secrets = utils.build_secrets(emhass_conf,logger) + updated_emhass_conf, built_secrets = utils.build_secrets( + emhass_conf, logger + ) emhass_conf.update(updated_emhass_conf) - params.update(utils.build_params(emhass_conf, built_secrets, defaults, logger)) + params.update( + utils.build_params(emhass_conf, built_secrets, defaults, logger) + ) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) # Create 3*48 (3 days of data) long lists runtime forecasts parameters runtimeparams = { - 'pv_power_forecast':[i+1 for i in range(3*48)], - 'load_power_forecast':[i+1 for i in range(3*48)], - 'load_cost_forecast':[i+1 for i in range(3*48)], - 'prod_price_forecast':[i+1 for i in range(3*48)] + "pv_power_forecast": [i + 1 for i in range(3 * 48)], + "load_power_forecast": [i + 1 for i in range(3 * 48)], + "load_cost_forecast": [i + 1 for i in range(3 * 48)], + "prod_price_forecast": [i + 1 for i in range(3 * 48)], } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params_json,logger) - optim_conf['delta_forecast_daily'] = pd.Timedelta(days=3) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params_json, logger + ) + optim_conf["delta_forecast_daily"] = pd.Timedelta(days=3) params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams_json, params_json, retrieve_hass_conf, - optim_conf, plant_conf, set_type, logger) + runtimeparams_json, + params_json, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) # Create Forecast Object - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=True) + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=True, + ) # Get weather forecast with list, check dataframe output - P_PV_forecast = fcst.get_weather_forecast(method='list') + P_PV_forecast = fcst.get_weather_forecast(method="list") self.assertIsInstance(P_PV_forecast, type(pd.DataFrame())) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + 
self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, fcst.time_zone) self.assertTrue(fcst.start_forecast < ts for ts in P_PV_forecast.index) self.assertTrue(P_PV_forecast.values[0][0] == 1) - self.assertTrue(P_PV_forecast.values[-1][0] == 3*48) + self.assertTrue(P_PV_forecast.values[-1][0] == 3 * 48) # Get load forecast with list, check dataframe output - P_load_forecast = fcst.get_load_forecast(method='list') + P_load_forecast = fcst.get_load_forecast(method="list") self.assertIsInstance(P_load_forecast, pd.core.series.Series) - self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_load_forecast.index.tz, fcst.time_zone) self.assertEqual(len(P_PV_forecast), len(P_load_forecast)) self.assertTrue(P_load_forecast.values[0] == 1) - self.assertTrue(P_load_forecast.values[-1] == 3*48) + self.assertTrue(P_load_forecast.values[-1] == 3 * 48) df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1) df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead) - df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] + df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] # Get load cost forecast with list, check dataframe output - df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead, method='list') + df_input_data_dayahead = fcst.get_load_cost_forecast( + df_input_data_dayahead, method="list" + ) self.assertTrue(fcst.var_load_cost in df_input_data_dayahead.columns) - self.assertTrue(df_input_data_dayahead.isnull().sum().sum()==0) + self.assertTrue(df_input_data_dayahead.isnull().sum().sum() == 0) self.assertTrue(df_input_data_dayahead[fcst.var_load_cost].iloc[0] == 1) - self.assertTrue(df_input_data_dayahead[fcst.var_load_cost].iloc[-1] == 3*48) + self.assertTrue(df_input_data_dayahead[fcst.var_load_cost].iloc[-1] == 3 * 48) # Get production price forecast with list, check dataframe output - df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead, method='list') + df_input_data_dayahead = fcst.get_prod_price_forecast( + df_input_data_dayahead, method="list" + ) self.assertTrue(fcst.var_prod_price in df_input_data_dayahead.columns) - self.assertTrue(df_input_data_dayahead.isnull().sum().sum()==0) + self.assertTrue(df_input_data_dayahead.isnull().sum().sum() == 0) self.assertTrue(df_input_data_dayahead[fcst.var_prod_price].iloc[0] == 1) - self.assertTrue(df_input_data_dayahead[fcst.var_prod_price].iloc[-1] == 3*48) + self.assertTrue(df_input_data_dayahead[fcst.var_prod_price].iloc[-1] == 3 * 48) # Test output values of weather forecast using passed runtime lists and saved sensor data def test_get_forecasts_with_lists_special_case(self): # Load default params params = {} - if emhass_conf['defaults_path'].exists(): - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) - params = utils.build_params(emhass_conf,secrets,config,logger) + if emhass_conf["defaults_path"].exists(): + config = utils.build_config( 
+ emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) + params = utils.build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) # Create 48 (1 day of data) long lists runtime forecasts parameters runtimeparams = { - 'load_cost_forecast':[i+1 for i in range(48)], - 'prod_price_forecast':[i+1 for i in range(48)] + "load_cost_forecast": [i + 1 for i in range(48)], + "prod_price_forecast": [i + 1 for i in range(48)], } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams + params["passed_data"] = runtimeparams params_json = json.dumps(params) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params_json,logger) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params_json, logger + ) set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams_json, params_json, retrieve_hass_conf, - optim_conf, plant_conf, set_type, logger) + runtimeparams_json, + params_json, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) # Create RetrieveHass Object - rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'], - retrieve_hass_conf['optimization_time_step'], retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + rh = RetrieveHass( + retrieve_hass_conf["hass_url"], + retrieve_hass_conf["long_lived_token"], + retrieve_hass_conf["optimization_time_step"], + retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) # Obtain sensor values from saved file if self.get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: rh.df_final, days_list, var_list = pickle.load(inp) - retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0]) - retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1]) - retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + retrieve_hass_conf["sensor_power_load_no_var_loads"] = str(self.var_list[0]) + retrieve_hass_conf["sensor_power_photovoltaics"] = str(self.var_list[1]) + retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] # Else obtain sensor values from HA else: - days_list = utils.get_days_list(retrieve_hass_conf['historic_days_to_retrieve']) - var_list = [retrieve_hass_conf['sensor_power_load_no_var_loads'], retrieve_hass_conf['sensor_power_photovoltaics']] - rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False) + days_list = utils.get_days_list( + retrieve_hass_conf["historic_days_to_retrieve"] + ) + var_list = [ + retrieve_hass_conf["sensor_power_load_no_var_loads"], + retrieve_hass_conf["sensor_power_photovoltaics"], + ] + rh.get_data( + days_list, + var_list, + minimal_response=False, + 
significant_changes_only=False, + ) # Prepare data for optimization - rh.prepare_data(retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = retrieve_hass_conf['load_negative'], - set_zero_min = retrieve_hass_conf['set_zero_min'], - var_replace_zero = retrieve_hass_conf['sensor_replace_zero'], - var_interp = retrieve_hass_conf['sensor_linear_interp']) + rh.prepare_data( + retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=retrieve_hass_conf["load_negative"], + set_zero_min=retrieve_hass_conf["set_zero_min"], + var_replace_zero=retrieve_hass_conf["sensor_replace_zero"], + var_interp=retrieve_hass_conf["sensor_linear_interp"], + ) df_input_data = rh.df_final.copy() # Create forecast object - fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf, - params, emhass_conf, logger, get_data_from_file=True) + fcst = Forecast( + retrieve_hass_conf, + optim_conf, + plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=True, + ) # Obtain only 48 rows of data and remove last column for input df_input_data = copy.deepcopy(df_input_data).iloc[-49:-1] - # Get weather forecast with list + # Get weather forecast with list P_PV_forecast = fcst.get_weather_forecast() df_input_data.index = P_PV_forecast.index df_input_data.index.freq = rh.df_final.index.freq # Get load cost forecast with list, check values from output - df_input_data = fcst.get_load_cost_forecast( - df_input_data, method='list') + df_input_data = fcst.get_load_cost_forecast(df_input_data, method="list") self.assertTrue(fcst.var_load_cost in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - self.assertTrue(df_input_data['unit_load_cost'].values[0] == 1) - self.assertTrue(df_input_data['unit_load_cost'].values[-1] == 48) + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + self.assertTrue(df_input_data["unit_load_cost"].values[0] == 1) + self.assertTrue(df_input_data["unit_load_cost"].values[-1] == 48) # Get production price forecast with list, check values from output - df_input_data = fcst.get_prod_price_forecast( - df_input_data, method='list') + df_input_data = fcst.get_prod_price_forecast(df_input_data, method="list") self.assertTrue(fcst.var_prod_price in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - self.assertTrue(df_input_data['unit_prod_price'].values[0] == 1) - self.assertTrue(df_input_data['unit_prod_price'].values[-1] == 48) - + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + self.assertTrue(df_input_data["unit_prod_price"].values[0] == 1) + self.assertTrue(df_input_data["unit_prod_price"].values[-1] == 48) + def test_get_power_from_weather(self): self.assertIsInstance(self.P_PV_forecast, pd.core.series.Series) - self.assertIsInstance(self.P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + self.P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(self.P_PV_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.df_weather_scrap), len(self.P_PV_forecast)) # Test passing a lists of PV params - self.plant_conf['pv_module_model'] = [self.plant_conf['pv_module_model'], self.plant_conf['pv_module_model']] - self.plant_conf['pv_inverter_model'] = [self.plant_conf['pv_inverter_model'], self.plant_conf['pv_inverter_model']] - 
self.plant_conf['surface_tilt'] = [30, 45] - self.plant_conf['surface_azimuth'] = [270, 90] - self.plant_conf['modules_per_string'] = [8, 8] - self.plant_conf['strings_per_inverter'] = [1, 1] - self.fcst = Forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - None, emhass_conf, logger, get_data_from_file=self.get_data_from_file) - df_weather_scrap = self.fcst.get_weather_forecast(method='csv') + self.plant_conf["pv_module_model"] = [ + self.plant_conf["pv_module_model"], + self.plant_conf["pv_module_model"], + ] + self.plant_conf["pv_inverter_model"] = [ + self.plant_conf["pv_inverter_model"], + self.plant_conf["pv_inverter_model"], + ] + self.plant_conf["surface_tilt"] = [30, 45] + self.plant_conf["surface_azimuth"] = [270, 90] + self.plant_conf["modules_per_string"] = [8, 8] + self.plant_conf["strings_per_inverter"] = [1, 1] + self.fcst = Forecast( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + None, + emhass_conf, + logger, + get_data_from_file=self.get_data_from_file, + ) + df_weather_scrap = self.fcst.get_weather_forecast(method="csv") P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap) self.assertIsInstance(P_PV_forecast, pd.core.series.Series) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.df_weather_scrap), len(P_PV_forecast)) # Test the mixed forecast - params = json.dumps({'passed_data':{'alpha':0.5,'beta':0.5}}) - df_input_data = self.input_data_dict['rh'].df_final.copy() - self.fcst = Forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - params, emhass_conf, logger, get_data_from_file=self.get_data_from_file) - df_weather_scrap = self.fcst.get_weather_forecast(method='csv') - P_PV_forecast = self.fcst.get_power_from_weather(df_weather_scrap, set_mix_forecast=True, df_now=df_input_data) + params = json.dumps({"passed_data": {"alpha": 0.5, "beta": 0.5}}) + df_input_data = self.input_data_dict["rh"].df_final.copy() + self.fcst = Forecast( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=self.get_data_from_file, + ) + df_weather_scrap = self.fcst.get_weather_forecast(method="csv") + P_PV_forecast = self.fcst.get_power_from_weather( + df_weather_scrap, set_mix_forecast=True, df_now=df_input_data + ) self.assertIsInstance(P_PV_forecast, pd.core.series.Series) - self.assertIsInstance(P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_PV_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_PV_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_PV_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.df_weather_scrap), len(P_PV_forecast)) - + # Test dataframe output of load forecast def test_get_load_forecast(self): P_load_forecast = self.fcst.get_load_forecast() self.assertIsInstance(P_load_forecast, pd.core.series.Series) - self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - 
self.assertIsInstance(P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_load_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast)) - print(">> The length of the load forecast = "+str(len(P_load_forecast))) + print(">> The length of the load forecast = " + str(len(P_load_forecast))) # Test the mixed forecast - params = json.dumps({'passed_data':{'alpha':0.5,'beta':0.5}}) - df_input_data = self.input_data_dict['rh'].df_final.copy() - self.fcst = Forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - params, emhass_conf, logger, get_data_from_file=self.get_data_from_file) - P_load_forecast = self.fcst.get_load_forecast(set_mix_forecast=True, df_now=df_input_data) + params = json.dumps({"passed_data": {"alpha": 0.5, "beta": 0.5}}) + df_input_data = self.input_data_dict["rh"].df_final.copy() + self.fcst = Forecast( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=self.get_data_from_file, + ) + P_load_forecast = self.fcst.get_load_forecast( + set_mix_forecast=True, df_now=df_input_data + ) self.assertIsInstance(P_load_forecast, pd.core.series.Series) - self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_load_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast)) # Test load forecast from csv P_load_forecast = self.fcst.get_load_forecast(method="csv") self.assertIsInstance(P_load_forecast, pd.core.series.Series) - self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_load_forecast.index.tz, self.fcst.time_zone) self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast)) - + # Test dataframe output of ml load forecast def test_get_load_forecast_mlforecaster(self): params = TestForecast.get_test_params() params_json = json.dumps(params) - costfun = 'profit' - action = 'forecast-model-fit' # fit, predict and tune methods + costfun = "profit" + action = "forecast-model-fit" # fit, predict and tune methods params = copy.deepcopy(json.loads(params_json)) # pass custom runtime parameters runtimeparams = { - 'historic_days_to_retrieve': 20, + "historic_days_to_retrieve": 20, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", - "num_lags": 48 + "num_lags": 48, } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams - params['optim_conf']['load_forecast_method'] = 'mlforecaster' + params["passed_data"] = runtimeparams + params["optim_conf"]["load_forecast_method"] = "mlforecaster" params_json = json.dumps(params) - input_data_dict = 
set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) - data = copy.deepcopy(input_data_dict['df_input_data']) + input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) + data = copy.deepcopy(input_data_dict["df_input_data"]) # Create MLForecaster Object - model_type = input_data_dict['params']['passed_data']['model_type'] - var_model = input_data_dict['params']['passed_data']['var_model'] - sklearn_model = input_data_dict['params']['passed_data']['sklearn_model'] - num_lags = input_data_dict['params']['passed_data']['num_lags'] - mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger) + model_type = input_data_dict["params"]["passed_data"]["model_type"] + var_model = input_data_dict["params"]["passed_data"]["var_model"] + sklearn_model = input_data_dict["params"]["passed_data"]["sklearn_model"] + num_lags = input_data_dict["params"]["passed_data"]["num_lags"] + mlf = MLForecaster( + data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger + ) mlf.fit() - # Get load forecast using mlforecaster - P_load_forecast = input_data_dict['fcst'].get_load_forecast(method="mlforecaster", use_last_window=False, - debug=True, mlf=mlf) + # Get load forecast using mlforecaster + P_load_forecast = input_data_dict["fcst"].get_load_forecast( + method="mlforecaster", use_last_window=False, debug=True, mlf=mlf + ) self.assertIsInstance(P_load_forecast, pd.core.series.Series) - self.assertIsInstance(P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + P_load_forecast.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + P_load_forecast.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(P_load_forecast.index.tz, self.fcst.time_zone) self.assertTrue((P_load_forecast.index == self.fcst.forecast_dates).all()) self.assertEqual(len(self.P_PV_forecast), len(P_load_forecast)) - + # Test load cost forecast dataframe output using saved csv referece file def test_get_load_cost_forecast(self): df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data) self.assertTrue(self.fcst.var_load_cost in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data, method='csv', - csv_path='data_load_cost_forecast.csv') + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + df_input_data = self.fcst.get_load_cost_forecast( + self.df_input_data, method="csv", csv_path="data_load_cost_forecast.csv" + ) self.assertTrue(self.fcst.var_load_cost in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + # Test production price forecast dataframe output using saved csv referece file def test_get_prod_price_forecast(self): df_input_data = self.fcst.get_prod_price_forecast(self.df_input_data) self.assertTrue(self.fcst.var_prod_price in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - df_input_data = self.fcst.get_prod_price_forecast(self.df_input_data, method='csv', - csv_path='data_load_cost_forecast.csv') + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + df_input_data = self.fcst.get_prod_price_forecast( + 
self.df_input_data, method="csv", csv_path="data_load_cost_forecast.csv" + ) self.assertTrue(self.fcst.var_prod_price in df_input_data.columns) - self.assertTrue(df_input_data.isnull().sum().sum()==0) - -if __name__ == '__main__': + self.assertTrue(df_input_data.isnull().sum().sum() == 0) + + +if __name__ == "__main__": unittest.main() ch.close() logger.removeHandler(ch) diff --git a/tests/test_machine_learning_forecaster.py b/tests/test_machine_learning_forecaster.py index 3cb7775d..ad77b8e9 100644 --- a/tests/test_machine_learning_forecaster.py +++ b/tests/test_machine_learning_forecaster.py @@ -1,85 +1,107 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import unittest -from unittest.mock import patch -import pathlib -import json import copy +import json +import pathlib import pickle -import pandas as pd -import numpy as np +import unittest +import numpy as np +import pandas as pd from skforecast.recursive import ForecasterRecursive +from emhass import utils from emhass.command_line import set_input_data_dict -from emhass.retrieve_hass import RetrieveHass from emhass.machine_learning_forecaster import MLForecaster -from emhass import utils +from emhass.retrieve_hass import RetrieveHass # The root folder root = pathlib.Path(utils.get_root(__file__, num_parent=2)) # Build emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False) + class TestMLForecaster(unittest.TestCase): - @staticmethod def get_test_params(): # Build params with default config and secrets - if emhass_conf['defaults_path'].exists(): - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) - params = utils.build_params(emhass_conf,secrets,config,logger) + if emhass_conf["defaults_path"].exists(): + config = utils.build_config( + emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) + params = utils.build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. 
does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) return params def setUp(self): params = TestMLForecaster.get_test_params() - costfun = 'profit' - action = 'forecast-model-fit' # fit, predict and tune methods + costfun = "profit" + action = "forecast-model-fit" # fit, predict and tune methods # Create runtime parameters runtimeparams = { - 'historic_days_to_retrieve': 20, + "historic_days_to_retrieve": 20, "model_type": "load_forecast", "var_model": "sensor.power_load_no_var_loads", "sklearn_model": "KNeighborsRegressor", - "num_lags": 48 + "num_lags": 48, } runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams - params['optim_conf']['load_forecast_method'] = 'skforecast' - #Create input dictionary + params["passed_data"] = runtimeparams + params["optim_conf"]["load_forecast_method"] = "skforecast" + # Create input dictionary params_json = json.dumps(params) - self.input_data_dict = set_input_data_dict(emhass_conf, costfun, params_json, runtimeparams_json, - action, logger, get_data_from_file=True) + self.input_data_dict = set_input_data_dict( + emhass_conf, + costfun, + params_json, + runtimeparams_json, + action, + logger, + get_data_from_file=True, + ) # Create MLForcaster object - data = copy.deepcopy(self.input_data_dict['df_input_data']) - model_type = self.input_data_dict['params']['passed_data']['model_type'] - var_model = self.input_data_dict['params']['passed_data']['var_model'] - sklearn_model = self.input_data_dict['params']['passed_data']['sklearn_model'] - num_lags = self.input_data_dict['params']['passed_data']['num_lags'] - self.mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger) + data = copy.deepcopy(self.input_data_dict["df_input_data"]) + model_type = self.input_data_dict["params"]["passed_data"]["model_type"] + var_model = self.input_data_dict["params"]["passed_data"]["var_model"] + sklearn_model = self.input_data_dict["params"]["passed_data"]["sklearn_model"] + num_lags = self.input_data_dict["params"]["passed_data"]["num_lags"] + self.mlf = MLForecaster( + data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger + ) # Create RetrieveHass Object get_data_from_file = True params = None - self.retrieve_hass_conf, self.optim_conf, _ = utils.get_yaml_parse(params_json,logger) - self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'], - params_json, emhass_conf, logger, get_data_from_file=get_data_from_file) + self.retrieve_hass_conf, self.optim_conf, _ = utils.get_yaml_parse( + params_json, logger + ) + self.rh = RetrieveHass( + self.retrieve_hass_conf["hass_url"], + self.retrieve_hass_conf["long_lived_token"], + self.retrieve_hass_conf["optimization_time_step"], + self.retrieve_hass_conf["time_zone"], + params_json, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) # Open and extract saved sensor data to test against - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: self.rh.df_final, self.days_list, self.var_list = pickle.load(inp) - + def test_fit(self): df_pred, df_pred_backtest = self.mlf.fit() self.assertIsInstance(self.mlf.forecaster, ForecasterRecursive) @@ -90,7 +112,7 @@ def test_fit(self): self.assertIsInstance(self.mlf.forecaster, ForecasterRecursive) self.assertIsInstance(df_pred, pd.DataFrame) 
self.assertIsInstance(df_pred_backtest, pd.DataFrame) - + def test_predict(self): self.mlf.fit() predictions = self.mlf.predict() @@ -98,14 +120,14 @@ def test_predict(self): self.assertTrue(predictions.isnull().sum().sum() == 0) # Test predict in production env using last_window data_tmp = copy.deepcopy(self.rh.df_final)[[self.mlf.var_model]] - data_last_window = data_tmp[data_tmp.index[-1] - pd.offsets.Day(2):] + data_last_window = data_tmp[data_tmp.index[-1] - pd.offsets.Day(2) :] predictions = self.mlf.predict(data_last_window) self.assertIsInstance(predictions, pd.Series) self.assertTrue(predictions.isnull().sum().sum() == 0) # Test again with last_window data but with NaNs - data_last_window.at[data_last_window.index[10],self.mlf.var_model] = np.nan - data_last_window.at[data_last_window.index[11],self.mlf.var_model] = np.nan - data_last_window.at[data_last_window.index[12],self.mlf.var_model] = np.nan + data_last_window.at[data_last_window.index[10], self.mlf.var_model] = np.nan + data_last_window.at[data_last_window.index[11], self.mlf.var_model] = np.nan + data_last_window.at[data_last_window.index[12], self.mlf.var_model] = np.nan predictions = self.mlf.predict(data_last_window) self.assertIsInstance(predictions, pd.Series) self.assertTrue(predictions.isnull().sum().sum() == 0) @@ -116,37 +138,41 @@ def test_predict(self): predictions = self.mlf.predict() self.assertIsInstance(predictions, pd.Series) self.assertTrue(predictions.isnull().sum().sum() == 0) - + def test_tune(self): self.mlf.fit() df_pred_optim = self.mlf.tune(debug=True) self.assertIsInstance(df_pred_optim, pd.DataFrame) self.assertTrue(self.mlf.is_tuned == True) # Test LinearRegression - data = copy.deepcopy(self.input_data_dict['df_input_data']) - model_type = self.input_data_dict['params']['passed_data']['model_type'] - var_model = self.input_data_dict['params']['passed_data']['var_model'] - sklearn_model = 'LinearRegression' - num_lags = self.input_data_dict['params']['passed_data']['num_lags'] - self.mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger) + data = copy.deepcopy(self.input_data_dict["df_input_data"]) + model_type = self.input_data_dict["params"]["passed_data"]["model_type"] + var_model = self.input_data_dict["params"]["passed_data"]["var_model"] + sklearn_model = "LinearRegression" + num_lags = self.input_data_dict["params"]["passed_data"]["num_lags"] + self.mlf = MLForecaster( + data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger + ) self.mlf.fit() df_pred_optim = self.mlf.tune(debug=True) self.assertIsInstance(df_pred_optim, pd.DataFrame) self.assertTrue(self.mlf.is_tuned == True) # Test ElasticNet - data = copy.deepcopy(self.input_data_dict['df_input_data']) - model_type = self.input_data_dict['params']['passed_data']['model_type'] - var_model = self.input_data_dict['params']['passed_data']['var_model'] - sklearn_model = 'ElasticNet' - num_lags = self.input_data_dict['params']['passed_data']['num_lags'] - self.mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger) + data = copy.deepcopy(self.input_data_dict["df_input_data"]) + model_type = self.input_data_dict["params"]["passed_data"]["model_type"] + var_model = self.input_data_dict["params"]["passed_data"]["var_model"] + sklearn_model = "ElasticNet" + num_lags = self.input_data_dict["params"]["passed_data"]["num_lags"] + self.mlf = MLForecaster( + data, model_type, var_model, sklearn_model, num_lags, emhass_conf, logger + ) self.mlf.fit() 
df_pred_optim = self.mlf.tune(debug=True) self.assertIsInstance(df_pred_optim, pd.DataFrame) self.assertTrue(self.mlf.is_tuned == True) - - -if __name__ == '__main__': + + +if __name__ == "__main__": unittest.main() ch.close() logger.removeHandler(ch) diff --git a/tests/test_machine_learning_regressor.py b/tests/test_machine_learning_regressor.py index ab6d1de9..2a39cd10 100644 --- a/tests/test_machine_learning_regressor.py +++ b/tests/test_machine_learning_regressor.py @@ -7,19 +7,20 @@ import numpy as np import pandas as pd +from sklearn.pipeline import Pipeline + from emhass import utils from emhass.command_line import set_input_data_dict from emhass.machine_learning_regressor import MLRegressor -from sklearn.pipeline import Pipeline # The root folder root = pathlib.Path(utils.get_root(__file__, num_parent=2)) # Build emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger @@ -30,12 +31,17 @@ class TestMLRegressor(unittest.TestCase): @staticmethod def get_test_params(): # Build params with default config and secrets - if emhass_conf['defaults_path'].exists(): - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) - params = utils.build_params(emhass_conf,secrets,config,logger) + if emhass_conf["defaults_path"].exists(): + config = utils.build_config( + emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) + params = utils.build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. 
does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) return params def setUp(self): @@ -43,7 +49,7 @@ def setUp(self): params = TestMLRegressor.get_test_params() costfun = "profit" action = "regressor-model-fit" # fit and predict methods - params["optim_conf"]['load_forecast_method'] = "skforecast" + params["optim_conf"]["load_forecast_method"] = "skforecast" # runtime parameters runtimeparams = { "csv_file": "heating_prediction.csv", @@ -53,7 +59,7 @@ def setUp(self): "model_type": "heating_hours_degreeday", "timestamp": "timestamp", "date_features": ["month", "day_of_week"], - "new_values": [12.79, 4.766, 1, 2] + "new_values": [12.79, 4.766, 1, 2], } params["passed_data"] = runtimeparams runtimeparams_json = json.dumps(runtimeparams) diff --git a/tests/test_optimization.py b/tests/test_optimization.py index 0be0dca4..733e93cb 100644 --- a/tests/test_optimization.py +++ b/tests/test_optimization.py @@ -3,269 +3,501 @@ import json import os -import unittest -import pandas as pd -import numpy as np import pathlib import pickle import random -from datetime import datetime, timezone +import unittest -from emhass.retrieve_hass import RetrieveHass -from emhass.optimization import Optimization -from emhass.forecast import Forecast -from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger,build_config, build_params, build_secrets +import numpy as np +import pandas as pd from pandas.testing import assert_series_equal +from emhass.forecast import Forecast +from emhass.optimization import Optimization +from emhass.retrieve_hass import RetrieveHass +from emhass.utils import ( + build_config, + build_params, + build_secrets, + get_days_list, + get_logger, + get_root, + get_yaml_parse, +) + # The root folder root = pathlib.Path(get_root(__file__, num_parent=2)) # Build emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -class TestOptimization(unittest.TestCase): +class TestOptimization(unittest.TestCase): def setUp(self): get_data_from_file = True params = {} # Build params with default config and secrets - if emhass_conf['defaults_path'].exists(): - config = build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = build_secrets(emhass_conf,logger,no_response=True) - params = build_params(emhass_conf,secrets,config,logger) + if emhass_conf["defaults_path"].exists(): + config = build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + _, secrets = build_secrets(emhass_conf, logger, no_response=True) + params = build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. 
does not exist in path: "+str(emhass_conf['defaults_path'] )) - retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(json.dumps(params),logger) - self.retrieve_hass_conf, self.optim_conf, self.plant_conf = \ - retrieve_hass_conf, optim_conf, plant_conf - #Build RetrieveHass object - self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'], - params, emhass_conf, logger) + raise Exception( + "config_defaults. does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) + retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse( + json.dumps(params), logger + ) + self.retrieve_hass_conf, self.optim_conf, self.plant_conf = ( + retrieve_hass_conf, + optim_conf, + plant_conf, + ) + # Build RetrieveHass object + self.rh = RetrieveHass( + self.retrieve_hass_conf["hass_url"], + self.retrieve_hass_conf["long_lived_token"], + self.retrieve_hass_conf["optimization_time_step"], + self.retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + ) # Obtain sensor values from saved file if get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: self.rh.df_final, self.days_list, self.var_list = pickle.load(inp) - self.retrieve_hass_conf['sensor_power_load_no_var_loads'] = str(self.var_list[0]) - self.retrieve_hass_conf['sensor_power_photovoltaics'] = str(self.var_list[1]) - self.retrieve_hass_conf['sensor_linear_interp'] = [retrieve_hass_conf['sensor_power_photovoltaics'], retrieve_hass_conf['sensor_power_load_no_var_loads']] - self.retrieve_hass_conf['sensor_replace_zero'] = [retrieve_hass_conf['sensor_power_photovoltaics']] + self.retrieve_hass_conf["sensor_power_load_no_var_loads"] = str( + self.var_list[0] + ) + self.retrieve_hass_conf["sensor_power_photovoltaics"] = str( + self.var_list[1] + ) + self.retrieve_hass_conf["sensor_linear_interp"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"], + retrieve_hass_conf["sensor_power_load_no_var_loads"], + ] + self.retrieve_hass_conf["sensor_replace_zero"] = [ + retrieve_hass_conf["sensor_power_photovoltaics"] + ] # Else obtain sensor values from HA else: - self.days_list = get_days_list(self.retrieve_hass_conf['historic_days_to_retrieve']) - self.var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads'], self.retrieve_hass_conf['sensor_power_photovoltaics']] - self.rh.get_data(self.days_list, self.var_list, - minimal_response=False, significant_changes_only=False) + self.days_list = get_days_list( + self.retrieve_hass_conf["historic_days_to_retrieve"] + ) + self.var_list = [ + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + self.retrieve_hass_conf["sensor_power_photovoltaics"], + ] + self.rh.get_data( + self.days_list, + self.var_list, + minimal_response=False, + significant_changes_only=False, + ) # Prepare data for optimization - self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], load_negative = self.retrieve_hass_conf['load_negative'], - set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], - var_interp = self.retrieve_hass_conf['sensor_linear_interp']) + self.rh.prepare_data( + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=self.retrieve_hass_conf["load_negative"], + set_zero_min=self.retrieve_hass_conf["set_zero_min"], + 
var_replace_zero=self.retrieve_hass_conf["sensor_replace_zero"], + var_interp=self.retrieve_hass_conf["sensor_linear_interp"], + ) self.df_input_data = self.rh.df_final.copy() - #Build Forecast object - self.fcst = Forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - params, emhass_conf, logger, get_data_from_file=get_data_from_file) - self.df_weather = self.fcst.get_weather_forecast(method='csv') + # Build Forecast object + self.fcst = Forecast( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + params, + emhass_conf, + logger, + get_data_from_file=get_data_from_file, + ) + self.df_weather = self.fcst.get_weather_forecast(method="csv") self.P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather) - self.P_load_forecast = self.fcst.get_load_forecast(method=optim_conf['load_forecast_method']) - self.df_input_data_dayahead = pd.concat([self.P_PV_forecast, self.P_load_forecast], axis=1) - self.df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast'] - #Build Optimization object - self.costfun = 'profit' - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.P_load_forecast = self.fcst.get_load_forecast( + method=optim_conf["load_forecast_method"] + ) + self.df_input_data_dayahead = pd.concat( + [self.P_PV_forecast, self.P_load_forecast], axis=1 + ) + self.df_input_data_dayahead.columns = ["P_PV_forecast", "P_load_forecast"] + # Build Optimization object + self.costfun = "profit" + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data) self.df_input_data = self.fcst.get_prod_price_forecast(self.df_input_data) self.input_data_dict = { - 'retrieve_hass_conf': retrieve_hass_conf, + "retrieve_hass_conf": retrieve_hass_conf, } - + # Check formatting of output from perfect optimization def test_perform_perfect_forecast_optim(self): - self.opt_res = self.opt.perform_perfect_forecast_optim(self.df_input_data, self.days_list) + self.opt_res = self.opt.perform_perfect_forecast_optim( + self.df_input_data, self.days_list + ) self.assertIsInstance(self.opt_res, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_'+self.costfun in self.opt_res.columns) - - + self.assertIsInstance( + self.opt_res.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_" + self.costfun in self.opt_res.columns) + def test_perform_dayahead_forecast_optim(self): # Check formatting of output from dayahead optimization - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) + 
self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns) - self.assertTrue(self.opt_res_dayahead['P_deferrable0'].sum()*( - self.retrieve_hass_conf['optimization_time_step'].seconds/3600) == self.optim_conf['nominal_power_of_deferrable_loads'][0]*self.optim_conf['operating_hours_of_each_deferrable_load'][0]) + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_" + self.costfun in self.opt_res_dayahead.columns) + self.assertTrue( + self.opt_res_dayahead["P_deferrable0"].sum() + * (self.retrieve_hass_conf["optimization_time_step"].seconds / 3600) + == self.optim_conf["nominal_power_of_deferrable_loads"][0] + * self.optim_conf["operating_hours_of_each_deferrable_load"][0] + ) # Test the battery, dynamics and grid exchange contraints - self.optim_conf.update({'set_use_battery': True}) - self.optim_conf.update({'set_nocharge_from_grid': True}) - self.optim_conf.update({'set_battery_dynamic': True}) - self.optim_conf.update({'set_nodischarge_to_grid': True}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.optim_conf.update({"set_use_battery": True}) + self.optim_conf.update({"set_nocharge_from_grid": True}) + self.optim_conf.update({"set_battery_dynamic": True}) + self.optim_conf.update({"set_nodischarge_to_grid": True}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertTrue('P_batt' in self.opt_res_dayahead.columns) - self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns) - self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], self.plant_conf['battery_target_state_of_charge']) + self.assertTrue("P_batt" in self.opt_res_dayahead.columns) + self.assertTrue("SOC_opt" in self.opt_res_dayahead.columns) + self.assertAlmostEqual( + self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1], "SOC_opt"], + self.plant_conf["battery_target_state_of_charge"], + ) # Test table conversion - opt_res = pd.read_csv(emhass_conf['data_path'] / 'opt_res_latest.csv', index_col='timestamp') - cost_cols = [i for i in opt_res.columns if 'cost_' in i] - table = opt_res[cost_cols].reset_index().sum(numeric_only=True).to_frame(name='Cost Totals').reset_index() - self.assertTrue(table.columns[0]=='index') - self.assertTrue(table.columns[1]=='Cost Totals') + opt_res = pd.read_csv( + emhass_conf["data_path"] / "opt_res_latest.csv", index_col="timestamp" + ) + cost_cols = [i for i in opt_res.columns if "cost_" in i] + table = ( + opt_res[cost_cols] + .reset_index() + .sum(numeric_only=True) + 
.to_frame(name="Cost Totals") + .reset_index() + ) + self.assertTrue(table.columns[0] == "index") + self.assertTrue(table.columns[1] == "Cost Totals") # Check status - self.assertTrue('optim_status' in self.opt_res_dayahead.columns) + self.assertTrue("optim_status" in self.opt_res_dayahead.columns) # Test treat_def_as_semi_cont and set_def_constant constraints - self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [True, True]}) - self.optim_conf.update({'set_deferrable_load_single_constant': [True, True]}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.optim_conf.update({"treat_deferrable_load_as_semi_cont": [True, True]}) + self.optim_conf.update({"set_deferrable_load_single_constant": [True, True]}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) - self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, True]}) - self.optim_conf.update({'set_deferrable_load_single_constant': [True, True]}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) + self.assertTrue(self.opt.optim_status == "Optimal") + self.optim_conf.update({"treat_deferrable_load_as_semi_cont": [False, True]}) + self.optim_conf.update({"set_deferrable_load_single_constant": [True, True]}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) - self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, True]}) - self.optim_conf.update({'set_deferrable_load_single_constant': [False, True]}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) + self.assertTrue(self.opt.optim_status == "Optimal") + self.optim_conf.update({"treat_deferrable_load_as_semi_cont": [False, True]}) + self.optim_conf.update({"set_deferrable_load_single_constant": [False, True]}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) - self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, False]}) - self.optim_conf.update({'set_deferrable_load_single_constant': [False, True]}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, 
self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) + self.assertTrue(self.opt.optim_status == "Optimal") + self.optim_conf.update({"treat_deferrable_load_as_semi_cont": [False, False]}) + self.optim_conf.update({"set_deferrable_load_single_constant": [False, True]}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) - self.assertTrue(self.opt.optim_status == 'Optimal') - self.optim_conf.update({'treat_deferrable_load_as_semi_cont': [False, False]}) - self.optim_conf.update({'set_deferrable_load_single_constant': [False, False]}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) + self.assertTrue(self.opt.optim_status == "Optimal") + self.optim_conf.update({"treat_deferrable_load_as_semi_cont": [False, False]}) + self.optim_conf.update({"set_deferrable_load_single_constant": [False, False]}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) - self.assertTrue(self.opt.optim_status == 'Optimal') + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) + self.assertTrue(self.opt.optim_status == "Optimal") # Test with different default solver, debug mode and batt SOC conditions - del self.optim_conf['lp_solver'] - del self.optim_conf['lp_solver_path'] - self.optim_conf['set_use_battery'] = True + del self.optim_conf["lp_solver"] + del self.optim_conf["lp_solver_path"] + self.optim_conf["set_use_battery"] = True soc_init = None soc_final = 0.3 - self.optim_conf['set_total_pv_sell'] = True - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) - + self.optim_conf["set_total_pv_sell"] = True + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) + unit_load_cost = self.df_input_data_dayahead[self.opt.var_load_cost].values unit_prod_price = self.df_input_data_dayahead[self.opt.var_prod_price].values self.opt_res_dayahead = self.opt.perform_optimization( - self.df_input_data_dayahead, self.P_PV_forecast.values.ravel(), - self.P_load_forecast.values.ravel(), unit_load_cost, unit_prod_price, - soc_init = soc_init, soc_final = soc_final, debug = True) + self.df_input_data_dayahead, + self.P_PV_forecast.values.ravel(), + self.P_load_forecast.values.ravel(), + unit_load_cost, + unit_prod_price, + soc_init=soc_init, + soc_final=soc_final, + debug=True, + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, 
pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns) - self.assertTrue(self.opt.optim_status == 'Optimal') - - + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_" + self.costfun in self.opt_res_dayahead.columns) + self.assertTrue(self.opt.optim_status == "Optimal") + # Check formatting of output from dayahead optimization in self-consumption def test_perform_dayahead_forecast_optim_costfun_selfconsumption(self): - costfun = 'self-consumption' - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - costfun, emhass_conf, logger) - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) + costfun = "self-consumption" + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + costfun, + emhass_conf, + logger, + ) + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_selfcons' in self.opt_res_dayahead.columns) - + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_selfcons" in self.opt_res_dayahead.columns) + # Check formatting of output from dayahead optimization in cost def test_perform_dayahead_forecast_optim_costfun_cost(self): - costfun = 'cost' - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - costfun, emhass_conf, logger) - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) + costfun = "cost" + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + costfun, + emhass_conf, + logger, + ) + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, 
pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_cost' in self.opt_res_dayahead.columns) - + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_cost" in self.opt_res_dayahead.columns) + # def test_perform_dayahead_forecast_optim_aux(self): - self.optim_conf['treat_deferrable_load_as_semi_cont'] = [False, False] - self.optim_conf['set_total_pv_sell'] = True - self.optim_conf['set_deferrable_load_single_constant'] = [True, True] - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) + self.optim_conf["treat_deferrable_load_as_semi_cont"] = [False, False] + self.optim_conf["set_total_pv_sell"] = True + self.optim_conf["set_deferrable_load_single_constant"] = [True, True] + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) # Test dayahead optimization using different solvers import pulp as pl + solver_list = pl.listSolvers(onlyAvailable=True) for solver in solver_list: - self.optim_conf['lp_solver'] = solver - if os.getenv('lp_solver_path', default=None) == None: - self.optim_conf['lp_solver_path'] = os.getenv('lp_solver_path', default=None) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) + self.optim_conf["lp_solver"] = solver + if os.getenv("lp_solver_path", default=None) == None: + self.optim_conf["lp_solver_path"] = os.getenv( + "lp_solver_path", default=None + ) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = 
self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast) + self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + def test_perform_naive_mpc_optim(self): - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) # Test the battery - self.optim_conf.update({'set_use_battery': True}) - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) + self.optim_conf.update({"set_use_battery": True}) + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) prediction_horizon = 10 soc_init = 0.4 soc_final = 0.6 @@ -273,69 +505,134 @@ def test_perform_naive_mpc_optim(self): def_start_timestep = [-5, 0] def_end_timestep = [4, 0] self.opt_res_dayahead = self.opt.perform_naive_mpc_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast, prediction_horizon, - soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours, def_start_timestep=def_start_timestep, def_end_timestep=def_end_timestep) + self.df_input_data_dayahead, + self.P_PV_forecast, + self.P_load_forecast, + prediction_horizon, + soc_init=soc_init, + soc_final=soc_final, + def_total_hours=def_total_hours, + def_start_timestep=def_start_timestep, + def_end_timestep=def_end_timestep, + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertTrue('P_batt' in self.opt_res_dayahead.columns) - self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns) - self.assertTrue(np.abs(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt']-soc_final)<1e-3) - term1 = self.optim_conf['nominal_power_of_deferrable_loads'][0]*def_total_hours[0] - term2 = self.opt_res_dayahead['P_deferrable0'].sum()*(self.retrieve_hass_conf['optimization_time_step'].seconds/3600) - self.assertTrue(np.abs(term1-term2)<1e-3) + self.assertTrue("P_batt" in self.opt_res_dayahead.columns) + self.assertTrue("SOC_opt" in self.opt_res_dayahead.columns) + self.assertTrue( + np.abs( + self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1], "SOC_opt"] + - soc_final + ) + < 1e-3 + ) + term1 = ( + self.optim_conf["nominal_power_of_deferrable_loads"][0] * def_total_hours[0] + ) + term2 = self.opt_res_dayahead["P_deferrable0"].sum() * ( + self.retrieve_hass_conf["optimization_time_step"].seconds / 3600 + ) + self.assertTrue(np.abs(term1 - term2) < 1e-3) # soc_init = 0.8 soc_final = 0.5 self.opt_res_dayahead = 
self.opt.perform_naive_mpc_optim( - self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast, prediction_horizon, - soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours, def_start_timestep=def_start_timestep, def_end_timestep=def_end_timestep) - self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], soc_final) - + self.df_input_data_dayahead, + self.P_PV_forecast, + self.P_load_forecast, + prediction_horizon, + soc_init=soc_init, + soc_final=soc_final, + def_total_hours=def_total_hours, + def_start_timestep=def_start_timestep, + def_end_timestep=def_end_timestep, + ) + self.assertAlmostEqual( + self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1], "SOC_opt"], + soc_final, + ) + # Test format output of dayahead optimization with a thermal deferrable load def test_thermal_load_optim(self): - self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead) - self.df_input_data_dayahead['outdoor_temperature_forecast'] = [random.normalvariate(10.0, 3.0) for _ in range(48)] + self.df_input_data_dayahead = self.fcst.get_load_cost_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead = self.fcst.get_prod_price_forecast( + self.df_input_data_dayahead + ) + self.df_input_data_dayahead["outdoor_temperature_forecast"] = [ + random.normalvariate(10.0, 3.0) for _ in range(48) + ] runtimeparams = { - 'def_load_config': [ + "def_load_config": [ {}, - {'thermal_config': { - 'heating_rate': 5.0, - 'cooling_constant': 0.1, - 'overshoot_temperature': 24.0, - 'start_temperature': 20, - 'desired_temperatures': [21]*48, + { + "thermal_config": { + "heating_rate": 5.0, + "cooling_constant": 0.1, + "overshoot_temperature": 24.0, + "start_temperature": 20, + "desired_temperatures": [21] * 48, } - } + }, ] } - self.optim_conf["def_load_config"] = runtimeparams['def_load_config'] - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) - unit_load_cost = self.df_input_data_dayahead[self.opt.var_load_cost].values # €/kWh - unit_prod_price = self.df_input_data_dayahead[self.opt.var_prod_price].values # €/kWh - self.opt_res_dayahead = self.opt.perform_optimization(self.df_input_data_dayahead, - self.P_PV_forecast.values.ravel(), - self.P_load_forecast.values.ravel(), - unit_load_cost, unit_prod_price) + self.optim_conf["def_load_config"] = runtimeparams["def_load_config"] + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) + unit_load_cost = self.df_input_data_dayahead[ + self.opt.var_load_cost + ].values # €/kWh + unit_prod_price = self.df_input_data_dayahead[ + self.opt.var_prod_price + ].values # €/kWh + self.opt_res_dayahead = self.opt.perform_optimization( + self.df_input_data_dayahead, + self.P_PV_forecast.values.ravel(), + self.P_load_forecast.values.ravel(), + unit_load_cost, + unit_prod_price, + ) self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame())) - self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) - self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns) - 
self.assertTrue(self.opt.optim_status == 'Optimal') - + self.assertIsInstance( + self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) + self.assertTrue("cost_fun_" + self.costfun in self.opt_res_dayahead.columns) + self.assertTrue(self.opt.optim_status == "Optimal") + # Setup function to run dayahead optimization for the following tests def run_penalty_test_forecast(self): - self.opt = Optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf, - self.fcst.var_load_cost, self.fcst.var_prod_price, - self.costfun, emhass_conf, logger) - def_total_hours = [5 * self.retrieve_hass_conf['optimization_time_step'].seconds / 3600.0] + self.opt = Optimization( + self.retrieve_hass_conf, + self.optim_conf, + self.plant_conf, + self.fcst.var_load_cost, + self.fcst.var_prod_price, + self.costfun, + emhass_conf, + logger, + ) + def_total_hours = [ + 5 * self.retrieve_hass_conf["optimization_time_step"].seconds / 3600.0 + ] def_start_timestep = [0] def_end_timestep = [0] prediction_horizon = 10 - self.optim_conf.update({'number_of_deferrable_loads': 1}) + self.optim_conf.update({"number_of_deferrable_loads": 1}) - self.fcst.params["passed_data"]["prod_price_forecast"] = [0 for i in range(prediction_horizon)] + self.fcst.params["passed_data"]["prod_price_forecast"] = [ + 0 for i in range(prediction_horizon) + ] self.fcst.params["passed_data"]["solar_forecast_kwp"] = [ 0 for i in range(prediction_horizon) ] @@ -355,19 +652,30 @@ def run_penalty_test_forecast(self): prediction_horizon, def_total_hours=def_total_hours, def_start_timestep=def_start_timestep, - def_end_timestep=def_end_timestep + def_end_timestep=def_end_timestep, ) # Test load is constant def test_constant_load(self): - self.fcst.params["passed_data"]["load_cost_forecast"] = [2,1,1,1,1,1.5,1.1,2,2,2] - self.optim_conf.update({'set_deferrable_load_single_constant': [True]}) + self.fcst.params["passed_data"]["load_cost_forecast"] = [ + 2, + 1, + 1, + 1, + 1, + 1.5, + 1.1, + 2, + 2, + 2, + ] + self.optim_conf.update({"set_deferrable_load_single_constant": [True]}) self.run_penalty_test_forecast() assert_series_equal( self.opt_res_dayahead["P_deferrable0"], - self.optim_conf['nominal_power_of_deferrable_loads'][0] + self.optim_conf["nominal_power_of_deferrable_loads"][0] * pd.Series( [0, 1, 1, 1, 1, 1, 0, 0, 0, 0], index=self.opt_res_dayahead.index ), @@ -376,14 +684,25 @@ def test_constant_load(self): # Test no startup penalty when bump is small def test_startup_penalty_continuous_with_small_bump(self): - self.fcst.params["passed_data"]["load_cost_forecast"] = [2,1,1,1,1,1.5,1.1,2,2,2] - self.optim_conf.update({'set_deferrable_startup_penalty': [100.0]}) + self.fcst.params["passed_data"]["load_cost_forecast"] = [ + 2, + 1, + 1, + 1, + 1, + 1.5, + 1.1, + 2, + 2, + 2, + ] + self.optim_conf.update({"set_deferrable_startup_penalty": [100.0]}) self.run_penalty_test_forecast() assert_series_equal( self.opt_res_dayahead["P_deferrable0"], - self.optim_conf['nominal_power_of_deferrable_loads'][0] + self.optim_conf["nominal_power_of_deferrable_loads"][0] * pd.Series( [0, 1, 1, 1, 1, 1, 0, 0, 0, 0], index=self.opt_res_dayahead.index ), @@ -392,42 +711,83 @@ def test_startup_penalty_continuous_with_small_bump(self): # Test startup penalty def test_startup_penalty_discontinuity_when_justified(self): - self.fcst.params["passed_data"]["load_cost_forecast"] = [2,1,1,1,1,1.5,1.1,2,2,2] + 
self.fcst.params["passed_data"]["load_cost_forecast"] = [ + 2, + 1, + 1, + 1, + 1, + 1.5, + 1.1, + 2, + 2, + 2, + ] - self.optim_conf.update({'set_deferrable_startup_penalty': [0.1]}) + self.optim_conf.update({"set_deferrable_startup_penalty": [0.1]}) self.run_penalty_test_forecast() - assert_series_equal(self.opt_res_dayahead["P_deferrable0"], - self.optim_conf['nominal_power_of_deferrable_loads'][0] * - pd.Series([0, 1, 1, 1, 1, 0, 1, 0, 0, 0], - index=self.opt_res_dayahead.index), - check_names=False) + assert_series_equal( + self.opt_res_dayahead["P_deferrable0"], + self.optim_conf["nominal_power_of_deferrable_loads"][0] + * pd.Series( + [0, 1, 1, 1, 1, 0, 1, 0, 0, 0], index=self.opt_res_dayahead.index + ), + check_names=False, + ) # Test penalty continuity when deferrable load is already on def test_startup_penalty_no_discontinuity_at_start(self): - self.fcst.params["passed_data"]["load_cost_forecast"] = [1.2,1,1,1,1,1.1,2,2,2,2] + self.fcst.params["passed_data"]["load_cost_forecast"] = [ + 1.2, + 1, + 1, + 1, + 1, + 1.1, + 2, + 2, + 2, + 2, + ] - self.optim_conf.update({ - 'set_deferrable_startup_penalty': [100.0], - "def_current_state": [True], - }) + self.optim_conf.update( + { + "set_deferrable_startup_penalty": [100.0], + "def_current_state": [True], + } + ) self.run_penalty_test_forecast() - assert_series_equal(self.opt_res_dayahead["P_deferrable0"], - self.optim_conf['nominal_power_of_deferrable_loads'][0] * - pd.Series([1, 1, 1, 1, 1, 0, 0, 0, 0, 0], - index=self.opt_res_dayahead.index), - check_names=False) + assert_series_equal( + self.opt_res_dayahead["P_deferrable0"], + self.optim_conf["nominal_power_of_deferrable_loads"][0] + * pd.Series( + [1, 1, 1, 1, 1, 0, 0, 0, 0, 0], index=self.opt_res_dayahead.index + ), + check_names=False, + ) # Test delay start def test_startup_penalty_delayed_start(self): - self.fcst.params["passed_data"]["load_cost_forecast"] = [1.2,1,1,1,1,1.1,2,2,2,2] + self.fcst.params["passed_data"]["load_cost_forecast"] = [ + 1.2, + 1, + 1, + 1, + 1, + 1.1, + 2, + 2, + 2, + 2, + ] self.optim_conf.update( { - 'set_deferrable_startup_penalty': [100.0], + "set_deferrable_startup_penalty": [100.0], "def_current_state": [False], } ) @@ -436,7 +796,7 @@ def test_startup_penalty_delayed_start(self): assert_series_equal( self.opt_res_dayahead["P_deferrable0"], - self.optim_conf['nominal_power_of_deferrable_loads'][0] + self.optim_conf["nominal_power_of_deferrable_loads"][0] * pd.Series( [0, 1, 1, 1, 1, 1, 0, 0, 0, 0], index=self.opt_res_dayahead.index ), @@ -444,7 +804,7 @@ def test_startup_penalty_delayed_start(self): ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() ch.close() logger.removeHandler(ch) diff --git a/tests/test_retrieve_hass.py b/tests/test_retrieve_hass.py index 2261a333..3034ab4a 100644 --- a/tests/test_retrieve_hass.py +++ b/tests/test_retrieve_hass.py @@ -1,109 +1,150 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import datetime -import unittest -import requests_mock -import numpy as np, pandas as pd -import pytz, pathlib, pickle, json, copy import bz2 +import copy +import datetime +import json +import pathlib +import pickle import pickle as cPickle +import unittest + +import numpy as np +import pandas as pd +import requests_mock from emhass import utils from emhass.retrieve_hass import RetrieveHass -from emhass.utils import get_yaml_parse, get_days_list, get_logger +from emhass.utils import get_days_list, get_logger, get_yaml_parse # The root folder root = pathlib.Path(utils.get_root(__file__, num_parent=2)) # Build 
emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['options_path'] = root / 'options.json' -emhass_conf['secrets_path'] = root / 'secrets_emhass(example).yaml' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["options_path"] = root / "options.json" +emhass_conf["secrets_path"] = root / "secrets_emhass(example).yaml" +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # create logger logger, ch = get_logger(__name__, emhass_conf, save_to_file=False) -class TestRetrieveHass(unittest.TestCase): +class TestRetrieveHass(unittest.TestCase): def setUp(self): self.get_data_from_file = True save_data_to_file = False # Build params with default secrets (no config) - if emhass_conf['defaults_path'].exists(): + if emhass_conf["defaults_path"].exists(): if self.get_data_from_file: - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) - params = utils.build_params(emhass_conf,secrets,{},logger) - retrieve_hass_conf, _, _ = get_yaml_parse(params,logger) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) + params = utils.build_params(emhass_conf, secrets, {}, logger) + retrieve_hass_conf, _, _ = get_yaml_parse(params, logger) else: - emhass_conf['secrets_path'] = root / 'secrets_emhass.yaml' - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,secrets_path=emhass_conf['secrets_path'],no_response=True) - params = utils.build_params(emhass_conf,secrets,config,logger) - retrieve_hass_conf, _, _ = get_yaml_parse(params,logger) + emhass_conf["secrets_path"] = root / "secrets_emhass.yaml" + config = utils.build_config( + emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets( + emhass_conf, + logger, + secrets_path=emhass_conf["secrets_path"], + no_response=True, + ) + params = utils.build_params(emhass_conf, secrets, config, logger) + retrieve_hass_conf, _, _ = get_yaml_parse(params, logger) params = None else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) + raise Exception( + "config_defaults. 
does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) # Force config params for testing retrieve_hass_conf["optimization_time_step"] = pd.to_timedelta(30, "minutes") - retrieve_hass_conf['sensor_power_photovoltaics'] = 'sensor.power_photovoltaics' - retrieve_hass_conf['sensor_power_load_no_var_loads'] = 'sensor.power_load_no_var_loads' - retrieve_hass_conf['sensor_replace_zero'] = ['sensor.power_photovoltaics'] - retrieve_hass_conf['sensor_linear_interp'] = ['sensor.power_photovoltaics','sensor.power_load_no_var_loads'] - retrieve_hass_conf['set_zero_min'] = True - retrieve_hass_conf['load_negative'] = True + retrieve_hass_conf["sensor_power_photovoltaics"] = "sensor.power_photovoltaics" + retrieve_hass_conf["sensor_power_load_no_var_loads"] = ( + "sensor.power_load_no_var_loads" + ) + retrieve_hass_conf["sensor_replace_zero"] = ["sensor.power_photovoltaics"] + retrieve_hass_conf["sensor_linear_interp"] = [ + "sensor.power_photovoltaics", + "sensor.power_load_no_var_loads", + ] + retrieve_hass_conf["set_zero_min"] = True + retrieve_hass_conf["load_negative"] = True self.retrieve_hass_conf = retrieve_hass_conf - self.rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'], - self.retrieve_hass_conf['optimization_time_step'], self.retrieve_hass_conf['time_zone'], - params, emhass_conf, logger, get_data_from_file=self.get_data_from_file) + self.rh = RetrieveHass( + self.retrieve_hass_conf["hass_url"], + self.retrieve_hass_conf["long_lived_token"], + self.retrieve_hass_conf["optimization_time_step"], + self.retrieve_hass_conf["time_zone"], + params, + emhass_conf, + logger, + get_data_from_file=self.get_data_from_file, + ) # Obtain sensor values from saved file if self.get_data_from_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp: + with open(emhass_conf["data_path"] / "test_df_final.pkl", "rb") as inp: self.rh.df_final, self.days_list, self.var_list = pickle.load(inp) # Else obtain sensor values from HA else: - self.days_list = get_days_list(self.retrieve_hass_conf['historic_days_to_retrieve']) - self.var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads'], self.retrieve_hass_conf['sensor_power_photovoltaics']] - self.rh.get_data(self.days_list, self.var_list, - minimal_response=False, significant_changes_only=False) - # Check to save updated data to file + self.days_list = get_days_list( + self.retrieve_hass_conf["historic_days_to_retrieve"] + ) + self.var_list = [ + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + self.retrieve_hass_conf["sensor_power_photovoltaics"], + ] + self.rh.get_data( + self.days_list, + self.var_list, + minimal_response=False, + significant_changes_only=False, + ) + # Check to save updated data to file if save_data_to_file: - with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'wb') as outp: - pickle.dump((self.rh.df_final, self.days_list, self.var_list), - outp, pickle.HIGHEST_PROTOCOL) + with open(emhass_conf["data_path"] / "test_df_final.pkl", "wb") as outp: + pickle.dump( + (self.rh.df_final, self.days_list, self.var_list), + outp, + pickle.HIGHEST_PROTOCOL, + ) self.df_raw = self.rh.df_final.copy() - + # Check yaml parse in setUp worked def test_get_yaml_parse(self): self.assertIsInstance(self.retrieve_hass_conf, dict) - self.assertTrue('hass_url' in self.retrieve_hass_conf.keys()) + self.assertTrue("hass_url" in self.retrieve_hass_conf.keys()) if self.get_data_from_file: - self.assertTrue(self.retrieve_hass_conf['hass_url'] == 
'https://myhass.duckdns.org/') - - # Check yaml parse worked + self.assertTrue( + self.retrieve_hass_conf["hass_url"] == "https://myhass.duckdns.org/" + ) + + # Check yaml parse worked def test_yaml_parse_web_server(self): params = {} - if emhass_conf['defaults_path'].exists(): - with emhass_conf['defaults_path'].open('r') as data: - defaults = json.load(data) - params.update(utils.build_params(emhass_conf, {}, defaults, logger)) - _, optim_conf, _ = get_yaml_parse(params,logger) + if emhass_conf["defaults_path"].exists(): + with emhass_conf["defaults_path"].open("r") as data: + defaults = json.load(data) + params.update(utils.build_params(emhass_conf, {}, defaults, logger)) + _, optim_conf, _ = get_yaml_parse(params, logger) # Just check forecast methods - self.assertFalse(optim_conf.get('weather_forecast_method') == None) - self.assertFalse(optim_conf.get('load_forecast_method') == None) - self.assertFalse(optim_conf.get('load_cost_forecast_method') == None) - self.assertFalse(optim_conf.get('production_price_forecast_method') == None) + self.assertFalse(optim_conf.get("weather_forecast_method") == None) + self.assertFalse(optim_conf.get("load_forecast_method") == None) + self.assertFalse(optim_conf.get("load_cost_forecast_method") == None) + self.assertFalse(optim_conf.get("production_price_forecast_method") == None) # Assume get_data to HA fails def test_get_data_failed(self): days_list = get_days_list(1) - var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads']] + var_list = [self.retrieve_hass_conf["sensor_power_load_no_var_loads"]] response = self.rh.get_data(days_list, var_list) if self.get_data_from_file: self.assertFalse(response) @@ -114,89 +155,162 @@ def test_get_data_failed(self): def test_get_data_mock(self): with requests_mock.mock() as m: days_list = get_days_list(1) - var_list = [self.retrieve_hass_conf['sensor_power_load_no_var_loads']] - data = bz2.BZ2File(str(emhass_conf['data_path'] / 'test_response_get_data_get_method.pbz2'), "rb") + var_list = [self.retrieve_hass_conf["sensor_power_load_no_var_loads"]] + data = bz2.BZ2File( + str( + emhass_conf["data_path"] / "test_response_get_data_get_method.pbz2" + ), + "rb", + ) data = cPickle.load(data) - m.get(self.retrieve_hass_conf['hass_url'], json=data.json()) - self.rh.get_data(days_list, var_list, - minimal_response=False, significant_changes_only=False, - test_url=self.retrieve_hass_conf['hass_url']) + m.get(self.retrieve_hass_conf["hass_url"], json=data.json()) + self.rh.get_data( + days_list, + var_list, + minimal_response=False, + significant_changes_only=False, + test_url=self.retrieve_hass_conf["hass_url"], + ) self.assertIsInstance(self.rh.df_final, type(pd.DataFrame())) - self.assertIsInstance(self.rh.df_final.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.rh.df_final.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + self.rh.df_final.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.rh.df_final.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(len(self.rh.df_final.columns), len(var_list)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) + self.assertEqual( + self.rh.df_final.index.freq, + self.retrieve_hass_conf["optimization_time_step"], + ) self.assertEqual(self.rh.df_final.index.tz, datetime.timezone.utc) - - + # Check the dataframe was formatted correctly def test_prepare_data(self): self.assertIsInstance(self.rh.df_final, 
type(pd.DataFrame())) - self.assertIsInstance(self.rh.df_final.index, pd.core.indexes.datetimes.DatetimeIndex) - self.assertIsInstance(self.rh.df_final.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype) + self.assertIsInstance( + self.rh.df_final.index, pd.core.indexes.datetimes.DatetimeIndex + ) + self.assertIsInstance( + self.rh.df_final.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype + ) self.assertEqual(len(self.rh.df_final.columns), len(self.var_list)) - self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), len(self.days_list)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) + self.assertEqual( + self.rh.df_final.index.isin(self.days_list).sum(), len(self.days_list) + ) + self.assertEqual( + self.rh.df_final.index.freq, + self.retrieve_hass_conf["optimization_time_step"], + ) self.assertEqual(self.rh.df_final.index.tz, datetime.timezone.utc) - self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], - load_negative = self.retrieve_hass_conf['load_negative'], - set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], - var_interp = self.retrieve_hass_conf['sensor_linear_interp']) + self.rh.prepare_data( + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=self.retrieve_hass_conf["load_negative"], + set_zero_min=self.retrieve_hass_conf["set_zero_min"], + var_replace_zero=self.retrieve_hass_conf["sensor_replace_zero"], + var_interp=self.retrieve_hass_conf["sensor_linear_interp"], + ) self.assertIsInstance(self.rh.df_final, type(pd.DataFrame())) - self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), self.df_raw.index.isin(self.days_list).sum()) + self.assertEqual( + self.rh.df_final.index.isin(self.days_list).sum(), + self.df_raw.index.isin(self.days_list).sum(), + ) self.assertEqual(len(self.rh.df_final.columns), len(self.df_raw.columns)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) - self.assertEqual(self.rh.df_final.index.tz, self.retrieve_hass_conf['time_zone']) - - # Test negative load + self.assertEqual( + self.rh.df_final.index.freq, + self.retrieve_hass_conf["optimization_time_step"], + ) + self.assertEqual( + self.rh.df_final.index.tz, self.retrieve_hass_conf["time_zone"] + ) + + # Test negative load def test_prepare_data_negative_load(self): - self.rh.df_final[self.retrieve_hass_conf['sensor_power_load_no_var_loads']] = -self.rh.df_final[self.retrieve_hass_conf['sensor_power_load_no_var_loads']] - self.rh.prepare_data(self.retrieve_hass_conf['sensor_power_load_no_var_loads'], - load_negative = True, - set_zero_min = self.retrieve_hass_conf['set_zero_min'], - var_replace_zero = self.retrieve_hass_conf['sensor_replace_zero'], - var_interp = None) + self.rh.df_final[ + self.retrieve_hass_conf["sensor_power_load_no_var_loads"] + ] = -self.rh.df_final[self.retrieve_hass_conf["sensor_power_load_no_var_loads"]] + self.rh.prepare_data( + self.retrieve_hass_conf["sensor_power_load_no_var_loads"], + load_negative=True, + set_zero_min=self.retrieve_hass_conf["set_zero_min"], + var_replace_zero=self.retrieve_hass_conf["sensor_replace_zero"], + var_interp=None, + ) self.assertIsInstance(self.rh.df_final, type(pd.DataFrame())) - self.assertEqual(self.rh.df_final.index.isin(self.days_list).sum(), self.df_raw.index.isin(self.days_list).sum()) + self.assertEqual( + self.rh.df_final.index.isin(self.days_list).sum(), + 
self.df_raw.index.isin(self.days_list).sum(), + ) self.assertEqual(len(self.rh.df_final.columns), len(self.df_raw.columns)) - self.assertEqual(self.rh.df_final.index.freq, self.retrieve_hass_conf['optimization_time_step']) - self.assertEqual(self.rh.df_final.index.tz, self.retrieve_hass_conf['time_zone']) - + self.assertEqual( + self.rh.df_final.index.freq, + self.retrieve_hass_conf["optimization_time_step"], + ) + self.assertEqual( + self.rh.df_final.index.tz, self.retrieve_hass_conf["time_zone"] + ) + # Test publish data def test_publish_data(self): - response, data = self.rh.post_data(self.df_raw[self.df_raw.columns[0]], - 25, 'sensor.p_pv_forecast', "Unit", "Variable", - type_var = 'power') + response, data = self.rh.post_data( + self.df_raw[self.df_raw.columns[0]], + 25, + "sensor.p_pv_forecast", + "Unit", + "Variable", + type_var="power", + ) self.assertEqual(response.status_code, 200) - self.assertTrue(data['state']==str(np.round(self.df_raw.loc[self.df_raw.index[25],self.df_raw.columns[0]],2))) - self.assertTrue(data['attributes']['unit_of_measurement']=='Unit') - self.assertTrue(data['attributes']['friendly_name']=='Variable') + self.assertTrue( + data["state"] + == str( + np.round( + self.df_raw.loc[self.df_raw.index[25], self.df_raw.columns[0]], 2 + ) + ) + ) + self.assertTrue(data["attributes"]["unit_of_measurement"] == "Unit") + self.assertTrue(data["attributes"]["friendly_name"] == "Variable") # Lets test publishing a forecast with more added attributes df = copy.deepcopy(self.df_raw.iloc[0:30]) - df.columns = ['P_PV', 'P_Load'] + df.columns = ["P_PV", "P_Load"] df["P_batt"] = 1000.0 df["SOC_opt"] = 0.5 - response, data = self.rh.post_data(df["P_PV"], 25, 'sensor.p_pv_forecast', "W", "PV Forecast", - type_var = 'power') + response, data = self.rh.post_data( + df["P_PV"], 25, "sensor.p_pv_forecast", "W", "PV Forecast", type_var="power" + ) self.assertEqual(response.status_code, 200) - self.assertTrue(data['state']==str(np.round(df.loc[df.index[25],df.columns[0]],2))) - self.assertTrue(data['attributes']['unit_of_measurement']=='W') - self.assertTrue(data['attributes']['friendly_name']=='PV Forecast') - self.assertIsInstance(data['attributes']['forecasts'], list) - response, data = self.rh.post_data(df["P_batt"], 25, 'sensor.p_batt_forecast', "W", "Battery Power Forecast", - type_var = 'batt') + self.assertTrue( + data["state"] == str(np.round(df.loc[df.index[25], df.columns[0]], 2)) + ) + self.assertTrue(data["attributes"]["unit_of_measurement"] == "W") + self.assertTrue(data["attributes"]["friendly_name"] == "PV Forecast") + self.assertIsInstance(data["attributes"]["forecasts"], list) + response, data = self.rh.post_data( + df["P_batt"], + 25, + "sensor.p_batt_forecast", + "W", + "Battery Power Forecast", + type_var="batt", + ) self.assertEqual(response.status_code, 200) - self.assertTrue(data['attributes']['unit_of_measurement']=='W') - self.assertTrue(data['attributes']['friendly_name']=='Battery Power Forecast') - response, data = self.rh.post_data(df["SOC_opt"], 25, 'sensor.SOC_forecast', "%", "Battery SOC Forecast", - type_var = 'SOC') + self.assertTrue(data["attributes"]["unit_of_measurement"] == "W") + self.assertTrue(data["attributes"]["friendly_name"] == "Battery Power Forecast") + response, data = self.rh.post_data( + df["SOC_opt"], + 25, + "sensor.SOC_forecast", + "%", + "Battery SOC Forecast", + type_var="SOC", + ) self.assertEqual(response.status_code, 200) - self.assertTrue(data['attributes']['unit_of_measurement']=='%') - 
self.assertTrue(data['attributes']['friendly_name']=='Battery SOC Forecast') - - -if __name__ == '__main__': + self.assertTrue(data["attributes"]["unit_of_measurement"] == "%") + self.assertTrue(data["attributes"]["friendly_name"] == "Battery SOC Forecast") + + +if __name__ == "__main__": unittest.main() ch.close() - logger.removeHandler(ch) \ No newline at end of file + logger.removeHandler(ch) diff --git a/tests/test_utils.py b/tests/test_utils.py index 3149acc8..d46051d0 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,9 +1,11 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +import json +import pathlib import unittest + import pandas as pd -import pathlib, json from emhass import utils @@ -11,298 +13,503 @@ root = pathlib.Path(utils.get_root(__file__, num_parent=2)) # Build emhass_conf paths emhass_conf = {} -emhass_conf['data_path'] = root / 'data/' -emhass_conf['root_path'] = root / 'src/emhass/' -emhass_conf['options_path'] = root / 'options.json' -emhass_conf['config_path'] = root / 'config.json' -emhass_conf['secrets_path'] = root / 'secrets_emhass(example).yaml' -emhass_conf['legacy_config_path'] = pathlib.Path(utils.get_root(__file__, num_parent=1)) / 'config_emhass.yaml' -emhass_conf['defaults_path'] = emhass_conf['root_path'] / 'data/config_defaults.json' -emhass_conf['associations_path'] = emhass_conf['root_path'] / 'data/associations.csv' +emhass_conf["data_path"] = root / "data/" +emhass_conf["root_path"] = root / "src/emhass/" +emhass_conf["options_path"] = root / "options.json" +emhass_conf["config_path"] = root / "config.json" +emhass_conf["secrets_path"] = root / "secrets_emhass(example).yaml" +emhass_conf["legacy_config_path"] = ( + pathlib.Path(utils.get_root(__file__, num_parent=1)) / "config_emhass.yaml" +) +emhass_conf["defaults_path"] = emhass_conf["root_path"] / "data/config_defaults.json" +emhass_conf["associations_path"] = emhass_conf["root_path"] / "data/associations.csv" # Create logger logger, ch = utils.get_logger(__name__, emhass_conf, save_to_file=False) + class TestCommandLineUtils(unittest.TestCase): - @staticmethod def get_test_params(): - print(emhass_conf['legacy_config_path']) + print(emhass_conf["legacy_config_path"]) # Build params with default config and secrets - if emhass_conf['defaults_path'].exists(): - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - _,secrets = utils.build_secrets(emhass_conf,logger,no_response=True) + if emhass_conf["defaults_path"].exists(): + config = utils.build_config( + emhass_conf, logger, emhass_conf["defaults_path"] + ) + _, secrets = utils.build_secrets(emhass_conf, logger, no_response=True) # Add Altitude secret manually for testing get_yaml_parse - secrets['Altitude'] = 8000.0 - params = utils.build_params(emhass_conf,secrets,config,logger) + secrets["Altitude"] = 8000.0 + params = utils.build_params(emhass_conf, secrets, config, logger) else: - raise Exception("config_defaults. does not exist in path: "+str(emhass_conf['defaults_path'] )) - + raise Exception( + "config_defaults. 
does not exist in path: " + + str(emhass_conf["defaults_path"]) + ) + return params def setUp(self): params = TestCommandLineUtils.get_test_params() # Add runtime parameters for forecast lists runtimeparams = { - 'pv_power_forecast':[i+1 for i in range(48)], - 'load_power_forecast':[i+1 for i in range(48)], - 'load_cost_forecast':[i+1 for i in range(48)], - 'prod_price_forecast':[i+1 for i in range(48)] + "pv_power_forecast": [i + 1 for i in range(48)], + "load_power_forecast": [i + 1 for i in range(48)], + "load_cost_forecast": [i + 1 for i in range(48)], + "prod_price_forecast": [i + 1 for i in range(48)], } self.runtimeparams_json = json.dumps(runtimeparams) - params['passed_data'] = runtimeparams - params['optim_conf']['weather_forecast_method'] = 'list' - params['optim_conf']['load_forecast_method'] = 'list' - params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['production_price_forecast_method'] = 'list' + params["passed_data"] = runtimeparams + params["optim_conf"]["weather_forecast_method"] = "list" + params["optim_conf"]["load_forecast_method"] = "list" + params["optim_conf"]["load_cost_forecast_method"] = "list" + params["optim_conf"]["production_price_forecast_method"] = "list" self.params_json = json.dumps(params) def test_build_config(self): # Test building with the different config methods config = {} params = {} - # Test with defaults - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path']) - params = utils.build_params(emhass_conf,{},config,logger) - self.assertTrue(params['optim_conf']['lp_solver'] == "default") - self.assertTrue(params['optim_conf']['lp_solver_path'] == "empty") - self.assertTrue(config['load_peak_hour_periods'] == {'period_hp_1': [{'start': '02:54'}, {'end': '15:24'}], 'period_hp_2': [{'start': '17:24'}, {'end': '20:24'}]}) - self.assertTrue(params['retrieve_hass_conf']['sensor_replace_zero'] == ['sensor.power_photovoltaics']) - # Test with config.json - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path'],emhass_conf['config_path']) - params = utils.build_params(emhass_conf,{},config,logger) - self.assertTrue(params['optim_conf']['lp_solver'] == "default") - self.assertTrue(params['optim_conf']['lp_solver_path'] == "empty") + # Test with defaults + config = utils.build_config(emhass_conf, logger, emhass_conf["defaults_path"]) + params = utils.build_params(emhass_conf, {}, config, logger) + self.assertTrue(params["optim_conf"]["lp_solver"] == "default") + self.assertTrue(params["optim_conf"]["lp_solver_path"] == "empty") + self.assertTrue( + config["load_peak_hour_periods"] + == { + "period_hp_1": [{"start": "02:54"}, {"end": "15:24"}], + "period_hp_2": [{"start": "17:24"}, {"end": "20:24"}], + } + ) + self.assertTrue( + params["retrieve_hass_conf"]["sensor_replace_zero"] + == ["sensor.power_photovoltaics"] + ) + # Test with config.json + config = utils.build_config( + emhass_conf, + logger, + emhass_conf["defaults_path"], + emhass_conf["config_path"], + ) + params = utils.build_params(emhass_conf, {}, config, logger) + self.assertTrue(params["optim_conf"]["lp_solver"] == "default") + self.assertTrue(params["optim_conf"]["lp_solver_path"] == "empty") # Test with legacy config_emhass yaml - config = utils.build_config(emhass_conf,logger,emhass_conf['defaults_path'],legacy_config_path=emhass_conf['legacy_config_path']) - params = utils.build_params(emhass_conf,{},config,logger) - self.assertTrue(params['retrieve_hass_conf']['sensor_replace_zero'] == ['sensor.power_photovoltaics']) - 
self.assertTrue(config['load_peak_hour_periods'] == {'period_hp_1': [{'start': '02:54'}, {'end': '15:24'}], 'period_hp_2': [{'start': '17:24'}, {'end': '20:24'}]}) - self.assertTrue(params['plant_conf']['battery_charge_efficiency'] == 0.95) + config = utils.build_config( + emhass_conf, + logger, + emhass_conf["defaults_path"], + legacy_config_path=emhass_conf["legacy_config_path"], + ) + params = utils.build_params(emhass_conf, {}, config, logger) + self.assertTrue( + params["retrieve_hass_conf"]["sensor_replace_zero"] + == ["sensor.power_photovoltaics"] + ) + self.assertTrue( + config["load_peak_hour_periods"] + == { + "period_hp_1": [{"start": "02:54"}, {"end": "15:24"}], + "period_hp_2": [{"start": "17:24"}, {"end": "20:24"}], + } + ) + self.assertTrue(params["plant_conf"]["battery_charge_efficiency"] == 0.95) - def test_get_yaml_parse(self): # Test get_yaml_parse with only secrets params = {} - updated_emhass_conf, secrets = utils.build_secrets(emhass_conf,logger) + updated_emhass_conf, secrets = utils.build_secrets(emhass_conf, logger) emhass_conf.update(updated_emhass_conf) params.update(utils.build_params(emhass_conf, secrets, {}, logger)) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(json.dumps(params),logger) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + json.dumps(params), logger + ) self.assertIsInstance(retrieve_hass_conf, dict) self.assertIsInstance(optim_conf, dict) self.assertIsInstance(plant_conf, dict) - self.assertTrue(retrieve_hass_conf['Altitude'] == 4807.8) + self.assertTrue(retrieve_hass_conf["Altitude"] == 4807.8) # Test get_yaml_parse with built params in get_test_params - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(self.params_json,logger) - self.assertTrue(retrieve_hass_conf['Altitude'] == 8000.0) - + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + self.params_json, logger + ) + self.assertTrue(retrieve_hass_conf["Altitude"] == 8000.0) + def test_get_forecast_dates(self): - retrieve_hass_conf, optim_conf, _ = utils.get_yaml_parse(self.params_json,logger) - freq = int(retrieve_hass_conf['optimization_time_step'].seconds/60.0) - delta_forecast = int(optim_conf['delta_forecast_daily'].days) - time_zone = retrieve_hass_conf['time_zone'] + retrieve_hass_conf, optim_conf, _ = utils.get_yaml_parse( + self.params_json, logger + ) + freq = int(retrieve_hass_conf["optimization_time_step"].seconds / 60.0) + delta_forecast = int(optim_conf["delta_forecast_daily"].days) + time_zone = retrieve_hass_conf["time_zone"] forecast_dates = utils.get_forecast_dates(freq, delta_forecast, time_zone) self.assertIsInstance(forecast_dates, pd.core.indexes.datetimes.DatetimeIndex) - self.assertTrue(len(forecast_dates)==int(delta_forecast*60*24/freq)) - + self.assertTrue(len(forecast_dates) == int(delta_forecast * 60 * 24 / freq)) + def test_treat_runtimeparams(self): # Test dayahead runtime params - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(self.params_json,logger) - set_type = 'dayahead-optim' + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + self.params_json, logger + ) + set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - self.runtimeparams_json, self.params_json, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) + self.runtimeparams_json, + self.params_json, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) self.assertIsInstance(params, str) 
params = json.loads(params) - self.assertIsInstance(params['passed_data']['pv_power_forecast'], list) - self.assertIsInstance(params['passed_data']['load_power_forecast'], list) - self.assertIsInstance(params['passed_data']['load_cost_forecast'], list) - self.assertIsInstance(params['passed_data']['prod_price_forecast'], list) - self.assertTrue(optim_conf['weather_forecast_method'] == 'list') - self.assertTrue(optim_conf['load_forecast_method'] == 'list') - self.assertTrue(optim_conf['load_cost_forecast_method'] == 'list') - self.assertTrue(optim_conf['production_price_forecast_method'] == 'list') + self.assertIsInstance(params["passed_data"]["pv_power_forecast"], list) + self.assertIsInstance(params["passed_data"]["load_power_forecast"], list) + self.assertIsInstance(params["passed_data"]["load_cost_forecast"], list) + self.assertIsInstance(params["passed_data"]["prod_price_forecast"], list) + self.assertTrue(optim_conf["weather_forecast_method"] == "list") + self.assertTrue(optim_conf["load_forecast_method"] == "list") + self.assertTrue(optim_conf["load_cost_forecast_method"] == "list") + self.assertTrue(optim_conf["production_price_forecast_method"] == "list") # Test naive MPC runtime params - set_type = 'naive-mpc-optim' + set_type = "naive-mpc-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - self.runtimeparams_json, self.params_json, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) + self.runtimeparams_json, + self.params_json, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) self.assertIsInstance(params, str) params = json.loads(params) - self.assertTrue(params['passed_data']['prediction_horizon'] == 10) - self.assertTrue(params['passed_data']['soc_init'] == plant_conf['battery_target_state_of_charge']) - self.assertTrue(params['passed_data']['soc_final'] == plant_conf['battery_target_state_of_charge']) - self.assertTrue(params['passed_data']['operating_hours_of_each_deferrable_load'] == optim_conf['operating_hours_of_each_deferrable_load']) - # Test passing optimization and plant configuration parameters at runtime + self.assertTrue(params["passed_data"]["prediction_horizon"] == 10) + self.assertTrue( + params["passed_data"]["soc_init"] + == plant_conf["battery_target_state_of_charge"] + ) + self.assertTrue( + params["passed_data"]["soc_final"] + == plant_conf["battery_target_state_of_charge"] + ) + self.assertTrue( + params["optim_conf"]["operating_hours_of_each_deferrable_load"] + == optim_conf["operating_hours_of_each_deferrable_load"] + ) + # Test passing optimization and plant configuration parameters at runtime runtimeparams = json.loads(self.runtimeparams_json) - runtimeparams.update({'number_of_deferrable_loads':3}) - runtimeparams.update({'nominal_power_of_deferrable_loads':[3000.0, 750.0, 2500.0]}) - runtimeparams.update({'operating_hours_of_each_deferrable_load':[5, 8, 10]}) - runtimeparams.update({'treat_deferrable_load_as_semi_cont':[True, True, True]}) - runtimeparams.update({'set_deferrable_load_single_constant':[False, False, False]}) - runtimeparams.update({'weight_battery_discharge':2.0}) - runtimeparams.update({'weight_battery_charge':2.0}) - runtimeparams.update({'solcast_api_key':'yoursecretsolcastapikey'}) - runtimeparams.update({'solcast_rooftop_id':'yourrooftopid'}) - runtimeparams.update({'solar_forecast_kwp':5.0}) - runtimeparams.update({'battery_target_state_of_charge':0.4}) - runtimeparams.update({'publish_prefix':'emhass_'}) - 
runtimeparams.update({'custom_pv_forecast_id':'my_custom_pv_forecast_id'}) - runtimeparams.update({'custom_load_forecast_id':'my_custom_load_forecast_id'}) - runtimeparams.update({'custom_batt_forecast_id':'my_custom_batt_forecast_id'}) - runtimeparams.update({'custom_batt_soc_forecast_id':'my_custom_batt_soc_forecast_id'}) - runtimeparams.update({'custom_grid_forecast_id':'my_custom_grid_forecast_id'}) - runtimeparams.update({'custom_cost_fun_id':'my_custom_cost_fun_id'}) - runtimeparams.update({'custom_optim_status_id':'my_custom_optim_status_id'}) - runtimeparams.update({'custom_unit_load_cost_id':'my_custom_unit_load_cost_id'}) - runtimeparams.update({'custom_unit_prod_price_id':'my_custom_unit_prod_price_id'}) - runtimeparams.update({'custom_deferrable_forecast_id':'my_custom_deferrable_forecast_id'}) - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(self.params_json,logger) - set_type = 'dayahead-optim' + runtimeparams.update({"number_of_deferrable_loads": 3}) + runtimeparams.update( + {"nominal_power_of_deferrable_loads": [3000.0, 750.0, 2500.0]} + ) + runtimeparams.update({"operating_hours_of_each_deferrable_load": [5, 8, 10]}) + runtimeparams.update({"treat_deferrable_load_as_semi_cont": [True, True, True]}) + runtimeparams.update( + {"set_deferrable_load_single_constant": [False, False, False]} + ) + runtimeparams.update({"weight_battery_discharge": 2.0}) + runtimeparams.update({"weight_battery_charge": 2.0}) + runtimeparams.update({"solcast_api_key": "yoursecretsolcastapikey"}) + runtimeparams.update({"solcast_rooftop_id": "yourrooftopid"}) + runtimeparams.update({"solar_forecast_kwp": 5.0}) + runtimeparams.update({"battery_target_state_of_charge": 0.4}) + runtimeparams.update({"publish_prefix": "emhass_"}) + runtimeparams.update({"custom_pv_forecast_id": "my_custom_pv_forecast_id"}) + runtimeparams.update({"custom_load_forecast_id": "my_custom_load_forecast_id"}) + runtimeparams.update({"custom_batt_forecast_id": "my_custom_batt_forecast_id"}) + runtimeparams.update( + {"custom_batt_soc_forecast_id": "my_custom_batt_soc_forecast_id"} + ) + runtimeparams.update({"custom_grid_forecast_id": "my_custom_grid_forecast_id"}) + runtimeparams.update({"custom_cost_fun_id": "my_custom_cost_fun_id"}) + runtimeparams.update({"custom_optim_status_id": "my_custom_optim_status_id"}) + runtimeparams.update( + {"custom_unit_load_cost_id": "my_custom_unit_load_cost_id"} + ) + runtimeparams.update( + {"custom_unit_prod_price_id": "my_custom_unit_prod_price_id"} + ) + runtimeparams.update( + {"custom_deferrable_forecast_id": "my_custom_deferrable_forecast_id"} + ) + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + self.params_json, logger + ) + set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams, self.params_json, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) + runtimeparams, + self.params_json, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) self.assertIsInstance(params, str) params = json.loads(params) - self.assertIsInstance(params['passed_data']['pv_power_forecast'], list) - self.assertIsInstance(params['passed_data']['load_power_forecast'], list) - self.assertIsInstance(params['passed_data']['load_cost_forecast'], list) - self.assertIsInstance(params['passed_data']['prod_price_forecast'], list) - self.assertTrue(optim_conf['number_of_deferrable_loads'] == 3) - self.assertTrue(optim_conf['nominal_power_of_deferrable_loads'] == 
[3000.0, 750.0, 2500.0]) - self.assertTrue(optim_conf['operating_hours_of_each_deferrable_load'] == [5, 8, 10]) - self.assertTrue(optim_conf['treat_deferrable_load_as_semi_cont'] == [True, True, True]) - self.assertTrue(optim_conf['set_deferrable_load_single_constant'] == [False, False, False]) - self.assertTrue(optim_conf['weight_battery_discharge'] == 2.0) - self.assertTrue(optim_conf['weight_battery_charge'] == 2.0) - self.assertTrue(retrieve_hass_conf['solcast_api_key'] == 'yoursecretsolcastapikey') - self.assertTrue(retrieve_hass_conf['solcast_rooftop_id'] == 'yourrooftopid') - self.assertTrue(retrieve_hass_conf['solar_forecast_kwp'] == 5.0) - self.assertTrue(plant_conf['battery_target_state_of_charge'] == 0.4) - self.assertTrue(params['passed_data']['publish_prefix'] == 'emhass_') - self.assertTrue(params['passed_data']['custom_pv_forecast_id'] == 'my_custom_pv_forecast_id') - self.assertTrue(params['passed_data']['custom_load_forecast_id'] == 'my_custom_load_forecast_id') - self.assertTrue(params['passed_data']['custom_batt_forecast_id'] == 'my_custom_batt_forecast_id') - self.assertTrue(params['passed_data']['custom_batt_soc_forecast_id'] == 'my_custom_batt_soc_forecast_id') - self.assertTrue(params['passed_data']['custom_grid_forecast_id'] == 'my_custom_grid_forecast_id') - self.assertTrue(params['passed_data']['custom_cost_fun_id'] == 'my_custom_cost_fun_id') - self.assertTrue(params['passed_data']['custom_optim_status_id'] == 'my_custom_optim_status_id') - self.assertTrue(params['passed_data']['custom_unit_load_cost_id'] == 'my_custom_unit_load_cost_id') - self.assertTrue(params['passed_data']['custom_unit_prod_price_id'] == 'my_custom_unit_prod_price_id') - self.assertTrue(params['passed_data']['custom_deferrable_forecast_id'] == 'my_custom_deferrable_forecast_id') - + self.assertIsInstance(params["passed_data"]["pv_power_forecast"], list) + self.assertIsInstance(params["passed_data"]["load_power_forecast"], list) + self.assertIsInstance(params["passed_data"]["load_cost_forecast"], list) + self.assertIsInstance(params["passed_data"]["prod_price_forecast"], list) + self.assertTrue(optim_conf["number_of_deferrable_loads"] == 3) + self.assertTrue( + optim_conf["nominal_power_of_deferrable_loads"] == [3000.0, 750.0, 2500.0] + ) + self.assertTrue( + optim_conf["operating_hours_of_each_deferrable_load"] == [5, 8, 10] + ) + self.assertTrue( + optim_conf["treat_deferrable_load_as_semi_cont"] == [True, True, True] + ) + self.assertTrue( + optim_conf["set_deferrable_load_single_constant"] == [False, False, False] + ) + self.assertTrue(optim_conf["weight_battery_discharge"] == 2.0) + self.assertTrue(optim_conf["weight_battery_charge"] == 2.0) + self.assertTrue( + retrieve_hass_conf["solcast_api_key"] == "yoursecretsolcastapikey" + ) + self.assertTrue(retrieve_hass_conf["solcast_rooftop_id"] == "yourrooftopid") + self.assertTrue(retrieve_hass_conf["solar_forecast_kwp"] == 5.0) + self.assertTrue(plant_conf["battery_target_state_of_charge"] == 0.4) + self.assertTrue(params["passed_data"]["publish_prefix"] == "emhass_") + self.assertTrue( + params["passed_data"]["custom_pv_forecast_id"] == "my_custom_pv_forecast_id" + ) + self.assertTrue( + params["passed_data"]["custom_load_forecast_id"] + == "my_custom_load_forecast_id" + ) + self.assertTrue( + params["passed_data"]["custom_batt_forecast_id"] + == "my_custom_batt_forecast_id" + ) + self.assertTrue( + params["passed_data"]["custom_batt_soc_forecast_id"] + == "my_custom_batt_soc_forecast_id" + ) + self.assertTrue( + 
params["passed_data"]["custom_grid_forecast_id"] + == "my_custom_grid_forecast_id" + ) + self.assertTrue( + params["passed_data"]["custom_cost_fun_id"] == "my_custom_cost_fun_id" + ) + self.assertTrue( + params["passed_data"]["custom_optim_status_id"] + == "my_custom_optim_status_id" + ) + self.assertTrue( + params["passed_data"]["custom_unit_load_cost_id"] + == "my_custom_unit_load_cost_id" + ) + self.assertTrue( + params["passed_data"]["custom_unit_prod_price_id"] + == "my_custom_unit_prod_price_id" + ) + self.assertTrue( + params["passed_data"]["custom_deferrable_forecast_id"] + == "my_custom_deferrable_forecast_id" + ) + def test_treat_runtimeparams_failed(self): # Test treatment of nan values params = TestCommandLineUtils.get_test_params() runtimeparams = { - 'pv_power_forecast':[1,2,3,4,5,'nan',7,8,9,10], - 'load_power_forecast':[1,2,'nan',4,5,6,7,8,9,10], - 'load_cost_forecast':[1,2,3,4,5,6,7,8,'nan',10], - 'prod_price_forecast':[1,2,3,4,'nan',6,7,8,9,10] + "pv_power_forecast": [1, 2, 3, 4, 5, "nan", 7, 8, 9, 10], + "load_power_forecast": [1, 2, "nan", 4, 5, 6, 7, 8, 9, 10], + "load_cost_forecast": [1, 2, 3, 4, 5, 6, 7, 8, "nan", 10], + "prod_price_forecast": [1, 2, 3, 4, "nan", 6, 7, 8, 9, 10], } - params['passed_data'] = runtimeparams - params['optim_conf']['weather_forecast_method'] = 'list' - params['optim_conf']['load_forecast_method'] = 'list' - params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['production_price_forecast_method'] = 'list' - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) - set_type = 'dayahead-optim' + params["passed_data"] = runtimeparams + params["optim_conf"]["weather_forecast_method"] = "list" + params["optim_conf"]["load_forecast_method"] = "list" + params["optim_conf"]["load_cost_forecast_method"] = "list" + params["optim_conf"]["production_price_forecast_method"] = "list" + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params, logger + ) + set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams, params, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) - - self.assertTrue(len([x for x in runtimeparams['pv_power_forecast'] if not str(x).isdigit()])>0) - self.assertTrue(len([x for x in runtimeparams['load_power_forecast'] if not str(x).isdigit()])>0) - self.assertTrue(len([x for x in runtimeparams['load_cost_forecast'] if not str(x).isdigit()])>0) - self.assertTrue(len([x for x in runtimeparams['prod_price_forecast'] if not str(x).isdigit()])>0) + runtimeparams, + params, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) + + self.assertTrue( + len([x for x in runtimeparams["pv_power_forecast"] if not str(x).isdigit()]) + > 0 + ) + self.assertTrue( + len( + [ + x + for x in runtimeparams["load_power_forecast"] + if not str(x).isdigit() + ] + ) + > 0 + ) + self.assertTrue( + len( + [x for x in runtimeparams["load_cost_forecast"] if not str(x).isdigit()] + ) + > 0 + ) + self.assertTrue( + len( + [ + x + for x in runtimeparams["prod_price_forecast"] + if not str(x).isdigit() + ] + ) + > 0 + ) # Test list embedded into a string params = TestCommandLineUtils.get_test_params() runtimeparams = { - 'pv_power_forecast':'[1,2,3,4,5,6,7,8,9,10]', - 'load_power_forecast':'[1,2,3,4,5,6,7,8,9,10]', - 'load_cost_forecast':'[1,2,3,4,5,6,7,8,9,10]', - 'prod_price_forecast':'[1,2,3,4,5,6,7,8,9,10]' + "pv_power_forecast": "[1,2,3,4,5,6,7,8,9,10]", + 
"load_power_forecast": "[1,2,3,4,5,6,7,8,9,10]", + "load_cost_forecast": "[1,2,3,4,5,6,7,8,9,10]", + "prod_price_forecast": "[1,2,3,4,5,6,7,8,9,10]", } - params['passed_data'] = runtimeparams - params['optim_conf']['weather_forecast_method'] = 'list' - params['optim_conf']['load_forecast_method'] = 'list' - params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['production_price_forecast_method'] = 'list' - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) - set_type = 'dayahead-optim' + params["passed_data"] = runtimeparams + params["optim_conf"]["weather_forecast_method"] = "list" + params["optim_conf"]["load_forecast_method"] = "list" + params["optim_conf"]["load_cost_forecast_method"] = "list" + params["optim_conf"]["production_price_forecast_method"] = "list" + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params, logger + ) + set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams, params, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) - self.assertIsInstance(runtimeparams['pv_power_forecast'], list) - self.assertIsInstance(runtimeparams['load_power_forecast'], list) - self.assertIsInstance(runtimeparams['load_cost_forecast'], list) - self.assertIsInstance(runtimeparams['prod_price_forecast'], list) + runtimeparams, + params, + retrieve_hass_conf, + optim_conf, + plant_conf, + set_type, + logger, + emhass_conf, + ) + self.assertIsInstance(runtimeparams["pv_power_forecast"], list) + self.assertIsInstance(runtimeparams["load_power_forecast"], list) + self.assertIsInstance(runtimeparams["load_cost_forecast"], list) + self.assertIsInstance(runtimeparams["prod_price_forecast"], list) # Test string of numbers params = TestCommandLineUtils.get_test_params() runtimeparams = { - 'pv_power_forecast':'1,2,3,4,5,6,7,8,9,10', - 'load_power_forecast':'1,2,3,4,5,6,7,8,9,10', - 'load_cost_forecast':'1,2,3,4,5,6,7,8,9,10', - 'prod_price_forecast':'1,2,3,4,5,6,7,8,9,10' + "pv_power_forecast": "1,2,3,4,5,6,7,8,9,10", + "load_power_forecast": "1,2,3,4,5,6,7,8,9,10", + "load_cost_forecast": "1,2,3,4,5,6,7,8,9,10", + "prod_price_forecast": "1,2,3,4,5,6,7,8,9,10", } - params['passed_data'] = runtimeparams - params['optim_conf']['weather_forecast_method'] = 'list' - params['optim_conf']['load_forecast_method'] = 'list' - params['optim_conf']['load_cost_forecast_method'] = 'list' - params['optim_conf']['production_price_forecast_method'] = 'list' - retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(params,logger) - set_type = 'dayahead-optim' + params["passed_data"] = runtimeparams + params["optim_conf"]["weather_forecast_method"] = "list" + params["optim_conf"]["load_forecast_method"] = "list" + params["optim_conf"]["load_cost_forecast_method"] = "list" + params["optim_conf"]["production_price_forecast_method"] = "list" + retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse( + params, logger + ) + set_type = "dayahead-optim" params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams( - runtimeparams, params, - retrieve_hass_conf, optim_conf, plant_conf, set_type, logger) - self.assertIsInstance(runtimeparams['pv_power_forecast'], str) - self.assertIsInstance(runtimeparams['load_power_forecast'], str) - self.assertIsInstance(runtimeparams['load_cost_forecast'], str) - self.assertIsInstance(runtimeparams['prod_price_forecast'], str) - + runtimeparams, + params, + retrieve_hass_conf, + optim_conf, + 
plant_conf, + set_type, + logger, + emhass_conf, + ) + self.assertIsInstance(runtimeparams["pv_power_forecast"], str) + self.assertIsInstance(runtimeparams["load_power_forecast"], str) + self.assertIsInstance(runtimeparams["load_cost_forecast"], str) + self.assertIsInstance(runtimeparams["prod_price_forecast"], str) + def test_build_secrets(self): # Test the build_secrets defaults from get_test_params() params = TestCommandLineUtils.get_test_params() - expected_keys = ['retrieve_hass_conf', 'params_secrets', 'optim_conf', 'plant_conf', 'passed_data'] + expected_keys = [ + "retrieve_hass_conf", + "params_secrets", + "optim_conf", + "plant_conf", + "passed_data", + ] for key in expected_keys: self.assertTrue(key in params.keys()) - self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") - self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "https://myhass.duckdns.org/") - self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "thatverylongtokenhere") + self.assertTrue(params["retrieve_hass_conf"]["time_zone"] == "Europe/Paris") + self.assertTrue( + params["retrieve_hass_conf"]["hass_url"] == "https://myhass.duckdns.org/" + ) + self.assertTrue( + params["retrieve_hass_conf"]["long_lived_token"] == "thatverylongtokenhere" + ) # Test Secrets from options.json params = {} secrets = {} - _, secrets = utils.build_secrets(emhass_conf,logger,options_path=emhass_conf["options_path"],secrets_path="",no_response=True) + _, secrets = utils.build_secrets( + emhass_conf, + logger, + options_path=emhass_conf["options_path"], + secrets_path="", + no_response=True, + ) params = utils.build_params(emhass_conf, secrets, {}, logger) for key in expected_keys: self.assertTrue(key in params.keys()) - self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") - self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "https://myhass.duckdns.org/") - self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "thatverylongtokenhere") + self.assertTrue(params["retrieve_hass_conf"]["time_zone"] == "Europe/Paris") + self.assertTrue( + params["retrieve_hass_conf"]["hass_url"] == "https://myhass.duckdns.org/" + ) + self.assertTrue( + params["retrieve_hass_conf"]["long_lived_token"] == "thatverylongtokenhere" + ) # Test Secrets from secrets_emhass(example).yaml params = {} secrets = {} - _, secrets = utils.build_secrets(emhass_conf,logger,secrets_path=emhass_conf["secrets_path"]) + _, secrets = utils.build_secrets( + emhass_conf, logger, secrets_path=emhass_conf["secrets_path"] + ) params = utils.build_params(emhass_conf, secrets, {}, logger) for key in expected_keys: self.assertTrue(key in params.keys()) - self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") - self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "https://myhass.duckdns.org/") - self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "thatverylongtokenhere") + self.assertTrue(params["retrieve_hass_conf"]["time_zone"] == "Europe/Paris") + self.assertTrue( + params["retrieve_hass_conf"]["hass_url"] == "https://myhass.duckdns.org/" + ) + self.assertTrue( + params["retrieve_hass_conf"]["long_lived_token"] == "thatverylongtokenhere" + ) # Test Secrets from arguments (command_line cli) params = {} secrets = {} - _, secrets = utils.build_secrets(emhass_conf,logger,{"url":"test.url", "key":"test.key" },secrets_path="") - logger.debug("Obtaining long_lived_token from passed argument") + _, secrets = utils.build_secrets( + emhass_conf, logger, {"url": 
"test.url", "key": "test.key"}, secrets_path="" + ) + logger.debug("Obtaining long_lived_token from passed argument") params = utils.build_params(emhass_conf, secrets, {}, logger) for key in expected_keys: self.assertTrue(key in params.keys()) - self.assertTrue(params['retrieve_hass_conf']['time_zone'] == "Europe/Paris") - self.assertTrue(params['retrieve_hass_conf']['hass_url'] == "test.url") - self.assertTrue(params['retrieve_hass_conf']['long_lived_token'] == "test.key") - - -if __name__ == '__main__': + self.assertTrue(params["retrieve_hass_conf"]["time_zone"] == "Europe/Paris") + self.assertTrue(params["retrieve_hass_conf"]["hass_url"] == "test.url") + self.assertTrue(params["retrieve_hass_conf"]["long_lived_token"] == "test.key") + + +if __name__ == "__main__": unittest.main() ch.close() - logger.removeHandler(ch) \ No newline at end of file + logger.removeHandler(ch)