4 changes: 2 additions & 2 deletions README.md
@@ -74,7 +74,7 @@ Second, install the necessary dependencies using `environment.yml` file. The fol

```
conda env create -f envs/environment.yml
-conda activate 247-cfe
+conda activate 247-env
```
Third, to run all the scenarios from the study, run the [snakemake](https://snakemake.readthedocs.io/en/stable/) workflow:

@@ -114,7 +114,7 @@ conda activate 247-env
3. The results of the paper can be reproduced by running the [snakemake](https://snakemake.readthedocs.io/en/stable/) workflow. The following commands will run the workflows for the paper:

```
-snakemake --cores <n> --configfile config_247cfe
+snakemake --cores <n> --configfile config_247cfe.yaml
snakemake --cores <n> --configfile config_BackgroundSystem.yaml
```

17 changes: 5 additions & 12 deletions Snakefile
@@ -1,8 +1,4 @@
from shutil import copyfile, move
-from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
-
-HTTP = HTTPRemoteProvider()
-

configfile: "config.yaml"

@@ -14,10 +10,7 @@ wildcard_constraints:
RDIR = os.path.join(config["results_dir"], config["run"])
RUN = config["run"]

-# Technology data inputs
-version = config["technology_data"]["version"]
year = config["technology_data"]["year"]
-url = f"https://raw.githubusercontent.com/PyPSA/technology-data/{version}/outputs/costs_{year}.csv"


rule merge_all_plots:
@@ -154,15 +147,15 @@ rule copy_config:
if config.get("retrieve_cost_data", True):

    rule retrieve_cost_data:
-        input:
-            HTTP.remote(url, keep_local=True),
+        params:
+            version=config["technology_data"]["version"],
        output:
            f"input/costs_{year}.csv",
-        # log: f"logs/{RDIR}retrieve_cost_data_{year}.log"
        resources:
            mem_mb=1000,
-        run:
-            move(input[0], output[0])
+        retries: 2
+        script:
+            "scripts/retrieve_cost_data.py"


# additional rules for cluster communication -> not included in the workflow
1 change: 1 addition & 0 deletions config.yaml
@@ -243,6 +243,7 @@ tech_colors:
"iron-air": "#d26303"
"iron-air inverter": "#d26303"
"iron-air storage": "#d26303"
"ironair storage": "#d26303"
"hydrogen storage": "#990090"
"hydrogen fuel cell": "#990090"
"hydrogen electrolysis": "#550055"
102 changes: 28 additions & 74 deletions scripts/_helpers.py
@@ -2,11 +2,37 @@
#
# SPDX-License-Identifier: MIT

import pypsa, numpy as np, pandas as pd
from pathlib import Path
import yaml
import difflib

+import requests
+from tqdm import tqdm
+
+def progress_retrieve(url, file, disable=False):
+    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"}
+    # Hotfix: tqdm progress bar not working, so force disable=True
+    disable = True
+
+    if disable:
+        response = requests.get(url, headers=headers, stream=True)
+        with open(file, "wb") as f:
+            f.write(response.content)
+    else:
+        response = requests.get(url, headers=headers, stream=True)
+        total_size = int(response.headers.get("content-length", 0))
+        chunk_size = 1024
+
+        with tqdm(
+            total=total_size,
+            unit="B",
+            unit_scale=True,
+            unit_divisor=1024,
+            desc=str(file),
+        ) as t:
+            with open(file, "wb") as f:
+                for data in response.iter_content(chunk_size=chunk_size):
+                    f.write(data)
+                    t.update(len(data))

def load_yaml(file_path):
with open(file_path, "r") as file:
@@ -31,78 +57,6 @@ def compare_yaml(file1, file2):
# compare_yaml("file1.yaml", "file2.yaml")


-def override_component_attrs():
-    # from https://github.com/PyPSA/pypsa-eur-sec/blob/93eb86eec87d34832ebc061697e289eabb38c105/scripts/solve_network.py
-    override_component_attrs = pypsa.descriptors.Dict(
-        {k: v.copy() for k, v in pypsa.components.component_attrs.items()}
-    )
-    override_component_attrs["Link"].loc["bus2"] = [
-        "string",
-        np.nan,
-        np.nan,
-        "2nd bus",
-        "Input (optional)",
-    ]
-    override_component_attrs["Link"].loc["bus3"] = [
-        "string",
-        np.nan,
-        np.nan,
-        "3rd bus",
-        "Input (optional)",
-    ]
-    override_component_attrs["Link"].loc["bus4"] = [
-        "string",
-        np.nan,
-        np.nan,
-        "4th bus",
-        "Input (optional)",
-    ]
-    override_component_attrs["Link"].loc["efficiency2"] = [
-        "static or series",
-        "per unit",
-        1.0,
-        "2nd bus efficiency",
-        "Input (optional)",
-    ]
-    override_component_attrs["Link"].loc["efficiency3"] = [
-        "static or series",
-        "per unit",
-        1.0,
-        "3rd bus efficiency",
-        "Input (optional)",
-    ]
-    override_component_attrs["Link"].loc["efficiency4"] = [
-        "static or series",
-        "per unit",
-        1.0,
-        "4th bus efficiency",
-        "Input (optional)",
-    ]
-    override_component_attrs["Link"].loc["p2"] = [
-        "series",
-        "MW",
-        0.0,
-        "2nd bus output",
-        "Output",
-    ]
-    override_component_attrs["Link"].loc["p3"] = [
-        "series",
-        "MW",
-        0.0,
-        "3rd bus output",
-        "Output",
-    ]
-    override_component_attrs["Link"].loc["p4"] = [
-        "series",
-        "MW",
-        0.0,
-        "4th bus output",
-        "Output",
-    ]
-
-    return override_component_attrs
-
-
def mock_snakemake(rulename, **wildcards):
"""
This function is expected to be executed from the 'scripts'-directory of '
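
The `progress_retrieve` helper added above is self-contained, so it can be smoke-tested outside the snakemake workflow. A minimal sketch, assuming network access and that the referenced `technology-data` tag exists (URL and output path are illustrative, not repository settings):

```
# Standalone check of progress_retrieve; URL and target path are example
# assumptions, not part of the workflow.
from pathlib import Path

from _helpers import progress_retrieve

url = (
    "https://raw.githubusercontent.com/PyPSA/technology-data/"
    "v0.6.2/outputs/costs_2030.csv"
)
progress_retrieve(url, Path("costs_2030.csv"))  # progress bar is force-disabled by the hotfix
print(Path("costs_2030.csv").stat().st_size, "bytes downloaded")
```
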
4 changes: 2 additions & 2 deletions scripts/plot_maps.py
@@ -76,7 +76,7 @@ def plot_map(
    # Drop data center nodes
    for name in datacenters:
        if name in n.buses.index:
-            n.mremove("Bus", [name])
+            n.remove("Bus", [name])

    # Empty dataframe indexed with electrical buses
    index = pd.DataFrame(index=n.buses.index)
@@ -259,7 +259,7 @@ def plot_datacenters(network, datacenters):
    # Drop data center nodes
    for name in datacenters:
        if name in n.buses.index:
-            n.mremove("Bus", [name])
+            n.remove("Bus", [name])

    # Load the geometries of datacenters
    world = gpd.read_file(gpd.datasets.get_path("naturalearth_lowres"))
41 changes: 41 additions & 0 deletions scripts/retrieve_cost_data.py
@@ -0,0 +1,41 @@
+# SPDX-FileCopyrightText: Contributors to PyPSA-Eur <https://github.com/pypsa/pypsa-eur>
+#
+# SPDX-License-Identifier: MIT
+"""
+Retrieve cost data from ``technology-data``.
+"""
+
+import logging
+from pathlib import Path
+from _helpers import progress_retrieve
+
+logger = logging.getLogger(__name__)
+
+if __name__ == "__main__":
+    if "snakemake" not in globals():
+        from _helpers import mock_snakemake
+
+        snakemake = mock_snakemake("retrieve_cost_data", year=2030)
+        rootpath = ".."
+    else:
+        rootpath = "."
+
+    version = snakemake.params.version
+    if "/" in version:
+        baseurl = f"https://raw.githubusercontent.com/{version}/outputs/"
+    else:
+        baseurl = f"https://raw.githubusercontent.com/PyPSA/technology-data/{version}/outputs/"
+    filepath = Path(snakemake.output[0])
+    url = baseurl + filepath.name
+
+    print(url)
+
+    to_fn = Path(rootpath) / filepath
+
+    print(to_fn)
+
+    logger.info(f"Downloading technology data from '{url}'.")
+    disable_progress = False
+    progress_retrieve(url, to_fn, disable=disable_progress)
+
+    logger.info(f"Technology data available at {to_fn}")
28 changes: 13 additions & 15 deletions scripts/solve_network.py
@@ -14,7 +14,6 @@

from vresutils.costdata import annuity
from vresutils.benchmark import memory_logger
-from _helpers import override_component_attrs

from typing import Dict, List, Tuple, Any

@@ -419,7 +418,7 @@ def strip_network(n, config) -> None:
    nodes_to_keep.extend(new_nodes)
    nodes_to_keep.extend(config["additional_nodes"])

-    n.mremove("Bus", n.buses.index.symmetric_difference(nodes_to_keep))
+    n.remove("Bus", n.buses.index.symmetric_difference(nodes_to_keep))

    # make sure lines are kept
    n.lines.carrier = "AC"
@@ -437,7 +436,7 @@ def strip_network(n, config) -> None:
        location_boolean = c.df.bus.isin(nodes_to_keep)
        to_keep = c.df.index[location_boolean & c.df.carrier.isin(carrier_to_keep)]
        to_drop = c.df.index.symmetric_difference(to_keep)
-        n.mremove(c.name, to_drop)
+        n.remove(c.name, to_drop)


def shutdown_lineexp(n: pypsa.Network) -> None:
@@ -1186,8 +1185,8 @@ def vl_constraints(n):
        vls_snd = vls.query("bus0==@name").index
        vls_rec = vls.query("bus1==@name").index

-        snd = n.model["Link-p"].loc[:, vls_snd].sum(dims=["Link"])
-        rec = n.model["Link-p"].loc[:, vls_rec].sum(dims=["Link"])
+        snd = n.model["Link-p"].loc[:, vls_snd].sum(dim=["Link"])
+        rec = n.model["Link-p"].loc[:, vls_rec].sum(dim=["Link"])
        load = n.loads_t.p_set[name + " load"]
        # requested_load = load + rec - snd
        rhs_up = load * (1 + delta) - load
Expand All @@ -1198,7 +1197,7 @@ def vl_constraints(n):

def shifts_conservation(n):
    vls = n.generators[n.generators.carrier == "virtual_link"]
-    shifts = n.model["Generator-p"].loc[:, vls.index].sum(dims=["Generator"])
+    shifts = n.model["Generator-p"].loc[:, vls.index].sum(dim=["Generator"])
    # the sum of load shifts across all DCs must equal 0 per time period
    n.model.add_constraints(shifts == 0, name=f"vl_limit-upper_{name}")

@@ -1211,8 +1210,8 @@ def DSM_constraints(n):
        dsm_delayin = dsm.query("bus0==@name").index
        dsm_delayout = dsm.query("bus1==@name").index

-        delayin = n.model["Link-p"].loc[:, dsm_delayin].sum(dims=["Link"])
-        delayout = n.model["Link-p"].loc[:, dsm_delayout].sum(dims=["Link"])
+        delayin = n.model["Link-p"].loc[:, dsm_delayin].sum(dim=["Link"])
+        delayout = n.model["Link-p"].loc[:, dsm_delayout].sum(dim=["Link"])

        load = n.loads_t.p_set[name + " load"]
        rhs_up = load * (1 + delta) - load
@@ -1232,8 +1231,8 @@ def DSM_conservation(n):
        dsm_link_delayout = dsm.query("bus0==@name").index
        dsm_link_delayin = dsm.query("bus1==@name").index

-        delayout = n.model["Link-p"].loc[:, dsm_link_delayout].sum(dims=["Link"])
-        delayin = n.model["Link-p"].loc[:, dsm_link_delayin].sum(dims=["Link"])
+        delayout = n.model["Link-p"].loc[:, dsm_link_delayout].sum(dim=["Link"])
+        delayin = n.model["Link-p"].loc[:, dsm_link_delayin].sum(dim=["Link"])

        daily_outs = delayout.groupby("snapshot.dayofyear").sum()
        daily_ins = delayin.groupby("snapshot.dayofyear").sum()
@@ -1256,10 +1255,10 @@ def DC_constraints(n):
        dsm_delayin = dsm.query("bus0==@name").index
        dsm_delayout = dsm.query("bus1==@name").index

-        snd = n.model["Link-p"].loc[:, vls_snd].sum(dims=["Link"])
-        rec = n.model["Link-p"].loc[:, vls_rec].sum(dims=["Link"])
-        delayin = n.model["Link-p"].loc[:, dsm_delayin].sum(dims=["Link"])
-        delayout = n.model["Link-p"].loc[:, dsm_delayout].sum(dims=["Link"])
+        snd = n.model["Link-p"].loc[:, vls_snd].sum(dim=["Link"])
+        rec = n.model["Link-p"].loc[:, vls_rec].sum(dim=["Link"])
+        delayin = n.model["Link-p"].loc[:, dsm_delayin].sum(dim=["Link"])
+        delayout = n.model["Link-p"].loc[:, dsm_delayout].sum(dim=["Link"])

        load = n.loads_t.p_set[name + " load"]
        # requested_load = load + rec - snd
@@ -1660,7 +1659,6 @@ def create_tuples(locations, values):
    # When running via snakemake
    n = pypsa.Network(
        timescope(year)["network_file"],
-        override_component_attrs=override_component_attrs(),
    )

    Nyears = 1  # years in simulation
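
The constraint helpers above all follow the same linopy pattern: select a block of `Link-p` variables, sum them along the `Link` dimension, and constrain the per-snapshot totals against a load profile. A minimal self-contained sketch of that pattern on a toy model, assuming `linopy` is installed (all names and numbers are illustrative, not the project's actual model):

```
# Toy reconstruction of the sum-then-constrain pattern used in
# vl_constraints/DSM_constraints above.
import linopy
import pandas as pd

m = linopy.Model()
snapshots = pd.RangeIndex(4, name="snapshot")
links = pd.Index(["vl1", "vl2"], name="Link")

# One variable per (snapshot, Link), analogous to n.model["Link-p"]
p = m.add_variables(lower=0, coords=[snapshots, links], name="Link-p")

# Sum over the Link dimension -> one linear expression per snapshot
sent = p.sum(dim=["Link"])

# Cap the total shifted load per snapshot, mirroring the rhs_up pattern
load = pd.Series(10.0, index=snapshots)
delta = 0.1
m.add_constraints(sent <= load * delta, name="vl_limit-upper")
```
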
7 changes: 2 additions & 5 deletions scripts/summarise_network.py
@@ -5,7 +5,6 @@
import pypsa, numpy as np, pandas as pd
import yaml
from solve_network import palette
-from _helpers import override_component_attrs


def weighted_avg(cfe, weights):
@@ -618,7 +617,7 @@ def summarise_network(n, policy, tech_palette):

    ### collect data
    data = n.statistics.optimal_capacity(
-        bus_carrier="AC", groupby=n.statistics.groupers.get_bus_and_carrier
+        bus_carrier="AC", groupby=["bus", "carrier"]
    ).round(1)
    data = data.droplevel(0)
    df_reset = data.reset_index()
@@ -832,9 +831,7 @@ def summarise_network(n, policy, tech_palette):
print(f"Summary for flexibility: {flexibility}")

# Read data
n = pypsa.Network(
snakemake.input.network, override_component_attrs=override_component_attrs()
)
n = pypsa.Network(snakemake.input.network)

grid_cfe_df = pd.read_csv(
snakemake.input.grid_cfe, index_col=0, parse_dates=True, header=[0, 1]
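
For reference, the list-based `groupby` used in `summarise_network` above can be tried on one of PyPSA's bundled example networks; a sketch assuming a recent PyPSA with `pypsa.examples` and a locally installed LP solver:

```
# Hedged sketch of the statistics call pattern; solver and example-network
# availability depend on the local PyPSA installation.
import pypsa

n = pypsa.examples.ac_dc_meshed()
n.optimize()

# Optimal capacity per (bus, carrier), as collected in summarise_network
cap = n.statistics.optimal_capacity(bus_carrier="AC", groupby=["bus", "carrier"])
print(cap.round(1))
```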