Skip to content

Commit

Permalink
Merge branch 'main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
ekatef authored Jan 13, 2025
2 parents be42f1d + 62cab73 commit f3c0ea7
Show file tree
Hide file tree
Showing 8 changed files with 92 additions and 37 deletions.
3 changes: 3 additions & 0 deletions .readthedocs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@

version: 2

sphinx:
configuration: doc/conf.py

build:
os: ubuntu-22.04
tools:
Expand Down
36 changes: 12 additions & 24 deletions Snakefile
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,11 @@ CDIR = RDIR if not run.get("shared_cutouts") else ""
SECDIR = run["sector_name"] + "/" if run.get("sector_name") else ""
SDIR = config["summary_dir"].strip("/") + f"/{SECDIR}"
RESDIR = config["results_dir"].strip("/") + f"/{SECDIR}"
COSTDIR = config["costs_dir"]

load_data_paths = get_load_paths_gegis("data", config)

if config["enable"].get("retrieve_cost_data", True):
COSTS = "resources/" + RDIR + "costs.csv"
COSTS = "resources/" + RDIR + f"costs_{config['costs']['year']}.csv"
else:
COSTS = "data/costs.csv"
ATLITE_NPROCESSES = config["atlite"].get("nprocesses", 4)
Expand Down Expand Up @@ -392,29 +391,18 @@ if not config["enable"].get("build_natura_raster", False):
if config["enable"].get("retrieve_cost_data", True):

rule retrieve_cost_data:
params:
version=config["costs"]["version"],
input:
HTTP.remote(
f"raw.githubusercontent.com/PyPSA/technology-data/{config['costs']['version']}/outputs/costs_{config['costs']['year']}.csv",
f"raw.githubusercontent.com/PyPSA/technology-data/{config['costs']['version']}/outputs/"
+ "costs_{year}.csv",
keep_local=True,
),
output:
COSTS,
"resources/" + RDIR + "costs_{year}.csv",
log:
"logs/" + RDIR + "retrieve_cost_data.log",
resources:
mem_mb=5000,
run:
move(input[0], output[0])

rule retrieve_cost_data_flexible:
input:
HTTP.remote(
f"raw.githubusercontent.com/PyPSA/technology-data/{config['costs']['version']}/outputs/costs"
+ "_{planning_horizons}.csv",
keep_local=True,
),
output:
costs=COSTDIR + "costs_{planning_horizons}.csv",
"logs/" + RDIR + "retrieve_cost_data_{year}.log",
resources:
mem_mb=5000,
run:
Expand Down Expand Up @@ -1071,7 +1059,7 @@ rule prepare_sector_network:
input:
network=RESDIR
+ "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_presec.nc",
costs=COSTDIR + "costs_{planning_horizons}.csv",
costs="resources/" + RDIR + "costs_{planning_horizons}.csv",
h2_cavern="data/hydrogen_salt_cavern_potentials.csv",
nodal_energy_totals="resources/"
+ SECDIR
Expand Down Expand Up @@ -1165,7 +1153,7 @@ rule add_export:
input:
overrides="data/override_component_attrs",
export_ports="resources/" + SECDIR + "export_ports.csv",
costs=COSTDIR + "costs_{planning_horizons}.csv",
costs="resources/" + RDIR + "costs_{planning_horizons}.csv",
ship_profile="resources/" + SECDIR + "ship_profile_{h2export}TWh.csv",
network=RESDIR
+ "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}.nc",
Expand Down Expand Up @@ -1632,7 +1620,7 @@ if config["foresight"] == "overnight":
# + "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}.nc",
network=RESDIR
+ "prenetworks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}_{planning_horizons}_{discountrate}_{demand}_{h2export}export.nc",
costs=COSTDIR + "costs_{planning_horizons}.csv",
costs="resources/" + RDIR + "costs_{planning_horizons}.csv",
configs=SDIR + "configs/config.yaml", # included to trigger copy_config rule
output:
RESDIR
Expand Down Expand Up @@ -1677,7 +1665,7 @@ rule make_sector_summary:
**config["costs"],
**config["export"],
),
costs=COSTDIR + "costs_{planning_horizons}.csv",
costs="resources/" + RDIR + "costs_{planning_horizons}.csv",
plots=expand(
RESDIR
+ "maps/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{sopts}-costs-all_{planning_horizons}_{discountrate}_{demand}_{h2export}export.pdf",
Expand Down Expand Up @@ -1915,7 +1903,7 @@ rule build_industry_demand: #default data
+ SECDIR
+ "demand/base_industry_totals_{planning_horizons}_{demand}.csv",
industrial_database="data/industrial_database.csv",
costs=COSTDIR + "costs_{planning_horizons}.csv",
costs="resources/" + RDIR + "costs_{planning_horizons}.csv",
industry_growth_cagr="data/demand/industry_growth_cagr.csv",
output:
industrial_energy_demand_per_node="resources/"
Expand Down
3 changes: 1 addition & 2 deletions config.default.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ logging:

results_dir: results/
summary_dir: results/
costs_dir: data/ # TODO change to the equivalent of technology data

foresight: overnight

Expand Down Expand Up @@ -358,7 +357,7 @@ renewable:
# Costs Configuration
costs:
year: 2030
version: v0.6.2
version: v0.10.0
discountrate: [0.071] #, 0.086, 0.111]
# [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html # noqa: E501
USD2013_to_EUR2013: 0.7532 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html
Expand Down
6 changes: 6 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,17 @@ This part of documentation collects descriptive release notes to capture the mai

**New Features and Major Changes**

* Drop duplication of retrieve_data and COST_DIR, add params and update technology-data version `PR #1249 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1249>`__

* In alternative clustering, generate hydro inflows by shape and avoid hydro inflows duplication for plants installed in the same node `PR #1120 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1120>`__

**Minor Changes and bug-fixing**

* Prevent computation of powerplantmatching if replace option is selected for custom_powerplants `PR #1281 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1281>`__

* Fix overlapping bus regions when alternative clustering is selected `PR #1287 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1287>`__

* Fix readthedocs by explicitly specifying the location of the Sphinx config `PR #1292 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1292>`__

PyPSA-Earth 0.6.0
=================
Expand Down
5 changes: 4 additions & 1 deletion scripts/add_electricity.py
Original file line number Diff line number Diff line change
Expand Up @@ -488,7 +488,10 @@ def attach_hydro(n, costs, ppl):
ror = ppl.query('technology == "Run-Of-River"')
phs = ppl.query('technology == "Pumped Storage"')
hydro = ppl.query('technology == "Reservoir"')
bus_id = ppl["bus"]
if snakemake.params.alternative_clustering:
bus_id = ppl["region_id"]
else:
bus_id = ppl["bus"]

inflow_idx = ror.index.union(hydro.index)
if not inflow_idx.empty:
Expand Down
20 changes: 20 additions & 0 deletions scripts/build_bus_regions.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,6 +256,26 @@ def get_gadm_shape(
crs=country_shapes.crs,
).dropna(axis="index", subset=["geometry"])

if snakemake.params.alternative_clustering:
# determine isolated buses
n.determine_network_topology()
non_isolated_buses = n.buses.duplicated(subset=["sub_network"], keep=False)
isolated_buses = n.buses[~non_isolated_buses].index
non_isolated_regions = onshore_regions[
~onshore_regions.name.isin(isolated_buses)
]
isolated_regions = onshore_regions[onshore_regions.name.isin(isolated_buses)]

# Combine regions while prioritizing non-isolated ones
onshore_regions = pd.concat(
[non_isolated_regions, isolated_regions]
).drop_duplicates("shape_id", keep="first")

if len(onshore_regions) < len(gadm_country):
logger.warning(
f"The number of remaining of buses are less than the number of administrative clusters suggested!"
)

onshore_regions = pd.concat([onshore_regions], ignore_index=True).to_file(
snakemake.output.regions_onshore
)
Expand Down
53 changes: 45 additions & 8 deletions scripts/build_renewable_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,6 +356,9 @@ def rescale_hydro(plants, runoff, normalize_using_yearly, normalization_year):
logger.info("No bus has installed hydro plants, ignoring normalization.")
return runoff

if snakemake.params.alternative_clustering:
plants = plants.set_index("shape_id")

years_statistics = normalize_using_yearly.index
if isinstance(years_statistics, pd.DatetimeIndex):
years_statistics = years_statistics.year
Expand Down Expand Up @@ -530,6 +533,24 @@ def create_scaling_factor(
# the region should be restricted for non-hydro technologies, as the hydro potential is calculated across hydrobasins which may span beyond the region of the country
cutout = filter_cutout_region(cutout, regions)

if snakemake.params.alternative_clustering:
regions = gpd.GeoDataFrame(
regions.reset_index()
.groupby("shape_id")
.agg(
{
"x": "mean",
"y": "mean",
"country": "first",
"geometry": "first",
"bus": "first",
}
)
.reset_index()
.set_index("bus"),
crs=regions.crs,
)

buses = regions.index

func = getattr(cutout, resource.pop("method"))
Expand All @@ -556,10 +577,17 @@ def create_scaling_factor(
# select busbar whose location (p) belongs to at least one hydrobasin geometry
# if extendable option is true, all buses are included
# otherwise only where hydro powerplants are available are considered
filter_bus_to_consider = regions.index.map(
lambda bus_id: config.get("extendable", False)
| (bus_id in hydro_ppls.bus.values)
)
if snakemake.params.alternative_clustering:
filter_bus_to_consider = regions.index.map(
lambda bus_id: config.get("extendable", False)
| (bus_id in hydro_ppls.region_id.values)
)
### TODO: quickfix. above case and the below case should by unified
if snakemake.params.alternative_clustering == False:
filter_bus_to_consider = regions.index.map(
lambda bus_id: config.get("extendable", False)
| (bus_id in hydro_ppls.bus.values)
)
bus_to_consider = regions.index[filter_bus_to_consider]

# identify subset of buses within the hydrobasins
Expand All @@ -577,10 +605,17 @@ def create_scaling_factor(
columns={"x": "lon", "y": "lat", "country": "countries"}
).loc[bus_in_hydrobasins, ["lon", "lat", "countries", "shape_id"]]

resource["plants"]["installed_hydro"] = [
True if (bus_id in hydro_ppls.bus.values) else False
for bus_id in resource["plants"].index
]
# TODO: these cases shall be fixed by restructuring the alternative clustering procedure
if snakemake.params.alternative_clustering == False:
resource["plants"]["installed_hydro"] = [
True if (bus_id in hydro_ppls.bus.values) else False
for bus_id in resource["plants"].index
]
else:
resource["plants"]["installed_hydro"] = [
True if (bus_id in hydro_ppls.region_id.values) else False
for bus_id in resource["plants"].shape_id.values
]

# get normalization before executing runoff
normalization = None
Expand All @@ -596,6 +631,8 @@ def create_scaling_factor(
else:
# otherwise perform the calculations
inflow = correction_factor * func(capacity_factor=True, **resource)
if snakemake.params.alternative_clustering:
inflow["plant"] = regions.shape_id.loc[inflow["plant"]].values

if "clip_min_inflow" in config:
inflow = inflow.where(inflow >= config["clip_min_inflow"], 0)
Expand Down
3 changes: 1 addition & 2 deletions test/config.test_myopic.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ tutorial: true

results_dir: results/
summary_dir: results/
costs_dir: data/ #TODO change to the equivalent of technology data

run:
name: "test_myopic" # use this to keep track of runs with different settings
Expand Down Expand Up @@ -99,7 +98,7 @@ custom_data:


costs: # Costs used in PyPSA-Earth-Sec. Year depends on the wildcard planning_horizon in the scenario section
version: v0.6.2
version: v0.10.0
lifetime: 25 #default lifetime
# From a Lion Hirth paper, also reflects average of Noothout et al 2016
discountrate: [0.071] #, 0.086, 0.111]
Expand Down

0 comments on commit f3c0ea7

Please sign in to comment.