
Commit

Merge branch 'master' into runtime_clean
GeoDerp authored Nov 28, 2024
2 parents b95505d + 6d3a8d6 commit 470e3e7
Showing 4 changed files with 34 additions and 22 deletions.
src/emhass/command_line.py: 23 additions & 18 deletions

@@ -798,25 +798,30 @@ def publish_data(input_data_dict: dict, logger: logging.Logger,
         if os.path.exists(entity_path) and len(os.listdir(entity_path)) > 0:
             # Obtain all files in entity_path
             entity_path_contents = os.listdir(entity_path)
-            for entity in entity_path_contents:
-                if entity != "metadata.json":
-                    # If publish_prefix is "all" publish all saved entities to Home Assistant
-                    # If publish_prefix matches the prefix from saved entities, publish to Home Assistant
-                    if publish_prefix in entity or publish_prefix == "all":
-                        entity_data = publish_json(entity,input_data_dict,entity_path,logger)
-                        if not isinstance(entity_data, bool):
-                            opt_res_list.append(entity_data)
-                            opt_res_list_names.append(entity.replace(".json", ""))
-                        else:
-                            return False
-            # Build a DataFrame with published entities
-            opt_res = pd.concat(opt_res_list, axis=1)
-            opt_res.columns = opt_res_list_names
-            return opt_res
+            # Confirm the entity path contains at least one file containing publish prefix or publish_prefix='all'
+            if any(publish_prefix in entity for entity in entity_path_contents) or publish_prefix == "all":
+                # Loop through all items in entity path
+                for entity in entity_path_contents:
+                    # If publish_prefix is "all" publish all saved entities to Home Assistant
+                    # If publish_prefix matches the prefix from saved entities, publish to Home Assistant
+                    if entity != "metadata.json" and (publish_prefix in entity or publish_prefix == "all"):
+                        entity_data = publish_json(entity,input_data_dict,entity_path,logger)
+                        if not isinstance(entity_data, bool):
+                            opt_res_list.append(entity_data)
+                            opt_res_list_names.append(entity.replace(".json", ""))
+                        else:
+                            return False
+                # Build a DataFrame with published entities
+                opt_res = pd.concat(opt_res_list, axis=1)
+                opt_res.columns = opt_res_list_names
+                return opt_res
+            else:
+                logger.warning("No saved entity json files that match prefix: " + str(publish_prefix))
+                logger.warning("Falling back to opt_res_latest")
         else:
-            logger.warning("no saved entity json files in path:" + str(entity_path))
-            logger.warning("falling back to opt_res_latest")
-            filename = "opt_res_latest.csv"
+            logger.warning("No saved entity json files in path:" + str(entity_path))
+            logger.warning("Falling back to opt_res_latest")
+            filename = "opt_res_latest.csv"
     else:
         filename = "opt_res_latest.csv"
     if opt_res_latest is None:
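The change adds an upfront check that at least one saved entity file matches publish_prefix before looping, and warns and falls back to opt_res_latest when none do. A minimal, self-contained sketch of that guard logic (not the emhass code itself; the select_entities name and logger setup are made up for illustration):

import logging
import os

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def select_entities(entity_path: str, publish_prefix: str) -> list:
    """Mirror the merged guard: publish everything for "all", otherwise only
    files containing the prefix; warn and return nothing when no file matches."""
    entity_path_contents = os.listdir(entity_path)
    if any(publish_prefix in entity for entity in entity_path_contents) or publish_prefix == "all":
        return [
            entity for entity in entity_path_contents
            if entity != "metadata.json"
            and (publish_prefix in entity or publish_prefix == "all")
        ]
    logger.warning("No saved entity json files that match prefix: %s", publish_prefix)
    logger.warning("Falling back to opt_res_latest")
    return []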
src/emhass/data/associations.csv: 1 addition & 1 deletion

@@ -1,5 +1,4 @@
 config_categorie,legacy_parameter_name,parameter,list_name
-retrieve_hass_conf,logging_level,logging_level
 retrieve_hass_conf,freq,optimization_time_step
 retrieve_hass_conf,days_to_retrieve,historic_days_to_retrieve
 retrieve_hass_conf,var_PV,sensor_power_photovoltaics
@@ -15,6 +14,7 @@ params_secrets,lat,Latitude
 params_secrets,lon,Longitude
 params_secrets,alt,Altitude
 optim_conf,costfun,costfun
+optim_conf,logging_level,logging_level
 optim_conf,set_use_battery,set_use_battery
 optim_conf,num_def_loads,number_of_deferrable_loads
 optim_conf,P_deferrable_nom,nominal_power_of_deferrable_loads,list_nominal_power_of_deferrable_loads
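The net effect is that logging_level is now categorized under optim_conf rather than retrieve_hass_conf. A mapping file like this can be inspected with pandas roughly as follows (a sketch only; the path assumes the repo root as working directory, and how emhass itself consumes the file may differ):

import pandas as pd

# Each row maps a legacy parameter name to its current name and config category;
# list_name only applies to list-valued parameters, so short rows pad with NaN.
associations = pd.read_csv("src/emhass/data/associations.csv")
row = associations.loc[associations["parameter"] == "logging_level"].iloc[0]
print(row["config_categorie"])  # "optim_conf" after this change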
src/emhass/utils.py: 0 additions & 1 deletion

@@ -482,7 +482,6 @@ def treat_runtimeparams(
         params["passed_data"]["historic_days_to_retrieve"] = params[
             "retrieve_hass_conf"
         ]["historic_days_to_retrieve"]
-
         if "model_type" not in runtimeparams.keys():
             model_type = "load_forecast"
         else:
src/emhass/web_server.py: 10 additions & 2 deletions

@@ -296,6 +296,9 @@ def action_call(action_name):
     if (emhass_conf['data_path'] / 'params.pkl').exists():
         with open(str(emhass_conf['data_path'] / 'params.pkl'), "rb") as fid:
             emhass_conf['config_path'], params = pickle.load(fid)
+            # Set local costfun variable
+            if params.get("optim_conf",None) is not None:
+                costfun = params["optim_conf"].get("costfun","profit")
             params = json.dumps(params)
     else:
         app.logger.error("Unable to find params.pkl file")
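The restored pickle holds a (config_path, params) tuple, so the added lines recover costfun from the saved optim_conf before params is serialized to JSON. In isolation the round trip looks roughly like this (a sketch with a made-up data path, not the emhass code):

import json
import pickle
from pathlib import Path

data_path = Path("/tmp/emhass-demo")  # hypothetical data_path
data_path.mkdir(exist_ok=True)

# Save a (config_path, params) tuple the way web_server.py does
params = {"optim_conf": {"costfun": "self-consumption", "logging_level": "DEBUG"}}
with open(data_path / "params.pkl", "wb") as fid:
    pickle.dump(("config.json", params), fid)

# Load it back: recover costfun defensively, then serialize params for the action
with open(data_path / "params.pkl", "rb") as fid:
    config_path, params = pickle.load(fid)
costfun = "profit"  # default used when optim_conf or costfun is absent
if params.get("optim_conf", None) is not None:
    costfun = params["optim_conf"].get("costfun", "profit")
params = json.dumps(params)
print(costfun)  # self-consumption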
@@ -322,6 +325,7 @@ def action_call(action_name):

     ActionStr = " >> Setting input data dict"
     app.logger.info(ActionStr)
+    app.logger.warning(costfun)
     input_data_dict = set_input_data_dict(emhass_conf, costfun,
                                           params, runtimeparams, action_name, app.logger)
     if not input_data_dict:
@@ -497,6 +501,7 @@ def action_call(action_name):
     if type(config) is bool and not config:
         raise Exception("Failed to find default config")

+    # Set local variables
     costfun = os.getenv('LOCAL_COSTFUN', config.get('costfun', 'profit'))
     logging_level = os.getenv('LOGGING_LEVEL', config.get('logging_level','INFO'))
     # Temporary set logging level if debug
@@ -536,7 +541,11 @@ def action_call(action_name):
     params = build_params(emhass_conf, params_secrets, config, app.logger)
     if type(params) is bool:
         raise Exception("A error has occurred while building params")
+    # Update params with local variables
+    params["optim_conf"]["costfun"] = costfun
+    params["optim_conf"]["logging_level"] = logging_level

+    # Save params to file for later reference
     if os.path.exists(str(emhass_conf['data_path'])):
         with open(str(emhass_conf['data_path'] / 'params.pkl'), "wb") as fid:
             pickle.dump((config_path, params), fid)
@@ -598,5 +607,4 @@ def action_call(action_name):
         app.logger.info("Using core emhass version: "+version('emhass'))
     except PackageNotFoundError:
         app.logger.info("Using development emhass version")
-    serve(app, host=server_ip, port=port, threads=8)
-
+    serve(app, host=server_ip, port=port, threads=8)
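Taken together, the web_server.py changes give costfun and logging_level a clear precedence: environment variable first, then the config file, then a hard-coded default, with the winner written back into params["optim_conf"] before the params are pickled. A minimal sketch of that precedence chain (simplified from the diff above; the resolve helper is made up for illustration):

import os

def resolve(env_var: str, config: dict, key: str, default: str) -> str:
    """Environment variable wins, then the config file value, then the default."""
    return os.getenv(env_var, config.get(key, default))

config = {"logging_level": "DEBUG"}  # e.g. parsed from a config file
costfun = resolve("LOCAL_COSTFUN", config, "costfun", "profit")            # "profit"
logging_level = resolve("LOGGING_LEVEL", config, "logging_level", "INFO")  # "DEBUG"

# The resolved values are then written back into params, as in the diff:
params = {"optim_conf": {}}
params["optim_conf"]["costfun"] = costfun
params["optim_conf"]["logging_level"] = logging_level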
