diff --git a/src/oemof/solph/components/_generic_storage.py b/src/oemof/solph/components/_generic_storage.py
index 476ab7f20..5c3dd56f9 100644
--- a/src/oemof/solph/components/_generic_storage.py
+++ b/src/oemof/solph/components/_generic_storage.py
@@ -120,7 +120,9 @@ class GenericStorage(Node):
         Determine the lifetime of an outflow; only applicable for multi-period
         models which can invest in storage capacity and have an
         invest_relation_output_capacity defined
-
+    multiple_tsam_timegrid : boolean
+        Relevant in tsam mode.
+        If True, both inter and intra storage content are considered.
     Notes
     -----
     The following sets, variables, constraints and objective parts are created
@@ -186,6 +188,7 @@ def __init__(
         lifetime_inflow=None,
         lifetime_outflow=None,
         custom_attributes=None,
+        multiple_tsam_timegrid=True,
     ):
         if inputs is None:
             inputs = {}
@@ -243,7 +246,7 @@ def __init__(
         self.invest_relation_output_capacity = invest_relation_output_capacity
         self.lifetime_inflow = lifetime_inflow
         self.lifetime_outflow = lifetime_outflow
-
+        self.multiple_tsam_timegrid = multiple_tsam_timegrid
         # Check number of flows.
         self._check_number_of_flows()
         # Check for infeasible parameter combinations
@@ -505,7 +508,17 @@ def _storage_content_bound_rule(block, n, t):
                 n.nominal_storage_capacity * n.max_storage_level[t],
             )
             return bounds
-
+        def _storage_content_bound_intra_rule(block, n, p, k, g):
+            """
+            Rule definition for bounds of the intra storage_content
+            variable of storage n in period p, cluster k, timestep g.
+            """
+            t = m.get_timestep_from_tsam_timestep(p, k, g)
+            bounds = (
+                n.nominal_storage_capacity * n.min_storage_level[t],
+                n.nominal_storage_capacity * n.max_storage_level[t],
+            )
+            return bounds
         if not m.TSAM_MODE:
             self.storage_content = Var(
                 self.STORAGES, m.TIMEPOINTS, bounds=_storage_content_bound_rule
@@ -520,23 +533,37 @@ def _storage_content_bound_rule(block, n, t):
             )
             self.storage_content[n, 0].fix()
         else:
-            self.storage_content_inter = Var(
-                self.STORAGES, m.CLUSTERS_OFFSET, within=NonNegativeReals
-            )
-            self.storage_content_intra = Var(
-                self.STORAGES, m.TIMEINDEX_TYPICAL_CLUSTER_OFFSET
-            )
+            for n in group:
+                if n.multiple_tsam_timegrid:
+                    self.storage_content_inter = Var(
+                        self.STORAGES, m.CLUSTERS_OFFSET, within=NonNegativeReals
+                    )
+                    self.storage_content_intra = Var(
+                        self.STORAGES, m.TIMEINDEX_TYPICAL_CLUSTER_OFFSET
+                    )
+                else:
+                    self.storage_content_intra = Var(
+                        self.STORAGES, m.TIMEINDEX_TYPICAL_CLUSTER_OFFSET, bounds=_storage_content_bound_intra_rule
+                    )
             # set the initial intra storage content
             # first timestep in intra storage is always zero
             for n in group:
                 for p, k in m.TYPICAL_CLUSTERS:
-                    self.storage_content_intra[n, p, k, 0] = 0
-                    self.storage_content_intra[n, p, k, 0].fix()
-                if n.initial_storage_level is not None:
-                    self.storage_content_inter[n, 0] = (
-                        n.initial_storage_level * n.nominal_storage_capacity
-                    )
-                    self.storage_content_inter[n, 0].fix()
+                    if n.multiple_tsam_timegrid:
+                        self.storage_content_intra[n, p, k, 0] = 0
+                        self.storage_content_intra[n, p, k, 0].fix()
+                    else:
+                        if n.initial_storage_level is not None:
+                            self.storage_content_intra[n, p, k, 0] = n.initial_storage_level * \
+                                n.nominal_storage_capacity
+                            self.storage_content_intra[n, p, k, 0].fix()
+                if n.initial_storage_level is not None and n.multiple_tsam_timegrid:
+                    self.storage_content_inter[n, 0] = (
+                        n.initial_storage_level * n.nominal_storage_capacity
+                    )
+                    self.storage_content_inter[n, 0].fix()
+
+
         # ************* Constraints ***************************

         def _storage_inter_minimum_level_rule(block):
@@ -566,13 +593,15 @@ def _storage_inter_minimum_level_rule(block):
                     )

         if m.TSAM_MODE:
-            self.storage_inter_minimum_level = Constraint(
-                self.STORAGES, m.TIMEINDEX_CLUSTER, noruleinit=True
-            )
+            for n in self.STORAGES:
+                if n.multiple_tsam_timegrid:
+                    self.storage_inter_minimum_level = Constraint(
+                        self.STORAGES, m.TIMEINDEX_CLUSTER, noruleinit=True
+                    )

-            self.storage_inter_minimum_level_build = BuildAction(
-                rule=_storage_inter_minimum_level_rule
-            )
+                    self.storage_inter_minimum_level_build = BuildAction(
+                        rule=_storage_inter_minimum_level_rule
+                    )

         def _storage_inter_maximum_level_rule(block):
             for n in self.STORAGES:
@@ -598,13 +627,15 @@ def _storage_inter_maximum_level_rule(block):
                     )

         if m.TSAM_MODE:
-            self.storage_inter_maximum_level = Constraint(
-                self.STORAGES, m.TIMEINDEX_CLUSTER, noruleinit=True
-            )
+            for n in self.STORAGES:
+                if n.multiple_tsam_timegrid:
+                    self.storage_inter_maximum_level = Constraint(
+                        self.STORAGES, m.TIMEINDEX_CLUSTER, noruleinit=True
+                    )

-            self.storage_inter_maximum_level_build = BuildAction(
-                rule=_storage_inter_maximum_level_rule
-            )
+                    self.storage_inter_maximum_level_build = BuildAction(
+                        rule=_storage_inter_maximum_level_rule
+                    )

         def _storage_balance_rule(block, n, p, t):
             """
@@ -702,11 +733,13 @@ def _inter_storage_balance_rule(block, n, i):
             return expr == 0

         if m.TSAM_MODE:
-            self.inter_balance = Constraint(
-                self.STORAGES,
-                m.CLUSTERS,
-                rule=_inter_storage_balance_rule,
-            )
+            for n in self.STORAGES:
+                if n.multiple_tsam_timegrid:
+                    self.inter_balance = Constraint(
+                        self.STORAGES,
+                        m.CLUSTERS,
+                        rule=_inter_storage_balance_rule,
+                    )

         def _balanced_storage_rule(block, n):
             """
@@ -733,9 +766,11 @@ def _balanced_inter_storage_rule(block, n):
                 self.STORAGES_BALANCED, rule=_balanced_storage_rule
             )
         else:
-            self.balanced_cstr = Constraint(
-                self.STORAGES_BALANCED, rule=_balanced_inter_storage_rule
-            )
+            for n in self.STORAGES:
+                if n.multiple_tsam_timegrid:
+                    self.balanced_cstr = Constraint(
+                        self.STORAGES_BALANCED, rule=_balanced_inter_storage_rule
+                    )

         def _power_coupled(block):
             """
diff --git a/src/oemof/solph/processing.py b/src/oemof/solph/processing.py
index f2e1e0fd5..f84bd9fdb 100644
--- a/src/oemof/solph/processing.py
+++ b/src/oemof/solph/processing.py
@@ -566,7 +566,6 @@ def _calculate_soc_from_inter_and_intra_soc(soc, storage, tsa_parameters):
     t_offset = 0
     for p, tsa_period in enumerate(tsa_parameters):
         for i, k in enumerate(tsa_period["order"]):
-            inter_value = soc["inter"].iloc[i_offset + i]["value"]
             # Self-discharge has to be taken into account for calculating
             # inter SOC for each timestep in cluster
             t0 = t_offset + i * tsa_period["timesteps"]
@@ -581,32 +580,38 @@ def _calculate_soc_from_inter_and_intra_soc(soc, storage, tsa_parameters):
                 if is_last_timestep
                 else tsa_period["timesteps"]
             )
-            inter_series = (
-                pd.Series(
-                    itertools.accumulate(
-                        (
-                            (1 - storage.loss_rate[t])
-                            ** tsa_period["segments"][(k, t - t0)]
-                            if "segments" in tsa_period
-                            else 1 - storage.loss_rate[t]
-                            for t in range(
+            intra_series = soc["intra"][(p, k)].iloc[0:timesteps]
+            if storage.multiple_tsam_timegrid:
+                inter_value = soc["inter"].iloc[i_offset + i]["value"]
+                inter_series = (
+                    pd.Series(
+                        itertools.accumulate(
+                            (
+                                (1 - storage.loss_rate[t])
+                                ** tsa_period["segments"][(k, t - t0)]
+                                if "segments" in tsa_period
+                                else 1 - storage.loss_rate[t]
+                                for t in range(
                                 t0,
                                 t0 + timesteps - 1,
                             )
-                        ),
-                        operator.mul,
-                        initial=1,
-                    )
+                            ),
+                            operator.mul,
+                            initial=1,
+                        )
                 )
+                    * inter_value
+                )
+                soc_frame = pd.DataFrame(
+                    intra_series["value"].values
+                    + inter_series.values,  # Neglect indexes, otherwise none
+                    columns=["value"],
+                )
+            else:
+                soc_frame = pd.DataFrame(
+                    intra_series["value"].values,
+                    columns=["value"],
+                )
-                * inter_value
-            )
-            intra_series = soc["intra"][(p, k)].iloc[0:timesteps]
-            soc_frame = pd.DataFrame(
-                intra_series["value"].values
-                + inter_series.values,  # Neglect indexes, otherwise none
-                columns=["value"],
-            )
-
             # Disaggregate segmentation
             if "segments" in tsa_period:
                 soc_disaggregated = _disaggregate_segmentation(
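
Usage note (illustration, not part of the patch): the sketch below shows how the new keyword could be passed when a storage component is created. Only multiple_tsam_timegrid comes from this diff; label, inputs, outputs, nominal_storage_capacity, loss_rate, and initial_storage_level follow the GenericStorage signature already used in this module, and the tsam-specific EnergySystem setup is omitted.

    from oemof import solph

    el_bus = solph.buses.Bus(label="electricity")

    battery = solph.components.GenericStorage(
        label="battery",
        inputs={el_bus: solph.flows.Flow()},
        outputs={el_bus: solph.flows.Flow()},
        nominal_storage_capacity=100,
        loss_rate=0.01,
        initial_storage_level=0.5,
        # Keyword introduced by this patch: with False, only the intra
        # storage content (bounded via the new intra bound rule) is built
        # in tsam mode; the inter-cluster variable and its linking
        # constraints are skipped for this storage.
        multiple_tsam_timegrid=False,
    )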