solve_networks: fix network reference for sector coupled case
FabianHofmann committed Aug 28, 2024
1 parent a65e9dc commit 7aded83
Showing 8 changed files with 66 additions and 63 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -55,7 +55,7 @@ scripts/old
doc/_build
sample_pypsa_eur
test/tmp/

playground/

# Specific configuration files
configs/scenarios/config.*.yaml
9 changes: 7 additions & 2 deletions Snakefile
@@ -827,7 +827,7 @@ if config["monte_carlo"]["options"].get("add_to_snakefile", False) == False:
solving=config["solving"],
augmented_line_connection=config["augmented_line_connection"],
input:
"networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
network="networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output:
"results/" + RDIR + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
log:
@@ -893,7 +893,9 @@ if config["monte_carlo"]["options"].get("add_to_snakefile", False) == True:
solving=config["solving"],
augmented_line_connection=config["augmented_line_connection"],
input:
"networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{unc}.nc",
network="networks/"
+ RDIR
+ "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{unc}.nc",
output:
"results/"
+ RDIR
@@ -1417,6 +1419,9 @@ rule copy_config:
if config["foresight"] == "overnight":

rule solve_sector_network:
params:
solving=config["solving"],
augmented_line_connection=config["augmented_line_connection"],
input:
overrides="data/override_component_attrs",
# network=RESDIR
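Naming the input (network=...) in these rules is what the fix in scripts/solve_network.py further down relies on: the script can then address the network file by attribute instead of by position. A minimal sketch of the pattern, with the rule body abbreviated and paths simplified:

# Illustrative sketch only, not part of the diff.
# In the rule (abbreviated):
#     input:
#         network="networks/" + RDIR + "elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
# Inside the script, Snakemake injects a `snakemake` object, so the same file is
# available by name:
import pypsa

n = pypsa.Network(snakemake.input.network)  # robust even if further inputs are added
# positional access, which the commit moves away from because it breaks when the
# input order changes:
# n = pypsa.Network(snakemake.input[0])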
2 changes: 1 addition & 1 deletion config.tutorial.yaml
@@ -5,7 +5,7 @@
tutorial: true


countries: ["NG", "BJ"]
countries: ["BJ", "NG"]

enable:
build_natura_raster: true
15 changes: 8 additions & 7 deletions scripts/prepare_gas_network.py
@@ -902,6 +902,7 @@ def check_existence(row):
pipelines = prepare_IGGIELGN_data(pipelines)

bus_regions_onshore = load_bus_region(snakemake.input.regions_onshore, pipelines)[0]
bus_regions_onshore.geometry = bus_regions_onshore.geometry.buffer(0)
country_borders = load_bus_region(snakemake.input.regions_onshore, pipelines)[1]

pipelines = parse_states(pipelines, bus_regions_onshore)
@@ -915,20 +916,20 @@
)

# Conversion of GADM id from 3 to 2-digit
pipelines["bus0"] = pipelines["bus0"].apply(
lambda id: three_2_two_digits_country(id[:3]) + id[3:]
)
# pipelines["bus0"] = pipelines["bus0"].apply(
# lambda id: three_2_two_digits_country(id[:3]) + id[3:]
# )

pipelines["bus1"] = pipelines["bus1"].apply(
lambda id: three_2_two_digits_country(id[:3]) + id[3:]
)
# pipelines["bus1"] = pipelines["bus1"].apply(
# lambda id: three_2_two_digits_country(id[:3]) + id[3:]
# )

pipelines.to_csv(snakemake.output.clustered_gas_network, index=False)

# TODO: plotting should be an extra rule!
# plot_clustered_gas_network(pipelines, bus_regions_onshore)

average_length = pipelines["length"].mean
average_length = pipelines["length"].mean()
print("average_length = ", average_length)

total_system_capacity = pipelines["GWKm"].sum()
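Two small but consequential fixes in this file: buffer(0) is a common GeoPandas idiom for repairing invalid (e.g. self-intersecting) region polygons before spatial operations, and pipelines["length"].mean without parentheses is the bound method rather than a number. A minimal sketch of the second point, with made-up values:

# Illustrative only, hypothetical values -- why the missing parentheses mattered.
import pandas as pd

lengths = pd.Series([120.0, 80.0, 100.0])  # pipeline lengths in km (made up)
print(lengths.mean)    # <bound method Series.mean of ...> -- a method object, not a value
print(lengths.mean())  # 100.0 -- the actual average, as the corrected line now computes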
84 changes: 43 additions & 41 deletions scripts/prepare_sector_network.py
@@ -2311,14 +2311,15 @@ def add_dac(n, costs):


def add_services(n, costs):
nhours = n.snapshot_weightings.generators.sum()
buses = spatial.nodes.intersection(n.loads_t.p_set.columns)

profile_residential = normalize_by_country(
n.loads_t.p_set[buses].reindex(columns=spatial.nodes, fill_value=0.0)
).fillna(0)

p_set_elec = p_set_from_scaling(
"services electricity", profile_residential, energy_totals
"services electricity", profile_residential, energy_totals, nhours
)

n.madd(
@@ -2330,7 +2331,7 @@ def add_services(n, costs):
p_set=p_set_elec,
)
p_set_biomass = p_set_from_scaling(
"services biomass", profile_residential, energy_totals
"services biomass", profile_residential, energy_totals, nhours
)

n.madd(
@@ -2353,7 +2354,9 @@ def add_services(n, costs):
# carrier="biomass emissions",
# p_set=-co2,
# )
p_set_oil = p_set_from_scaling("services oil", profile_residential, energy_totals)
p_set_oil = p_set_from_scaling(
"services oil", profile_residential, energy_totals, nhours
)

n.madd(
"Load",
@@ -2375,7 +2378,9 @@ def add_services(n, costs):
p_set=-co2,
)

p_set_gas = p_set_from_scaling("services gas", profile_residential, energy_totals)
p_set_gas = p_set_from_scaling(
"services gas", profile_residential, energy_totals, nhours
)

n.madd(
"Load",
@@ -2468,12 +2473,16 @@ def normalize_and_group(df, multiindex=False):
)


def p_set_from_scaling(col, scaling, energy_totals):
def p_set_from_scaling(col, scaling, energy_totals, nhours):
"""
Function to create p_set from energy_totals, using the per-unit scaling
dataframe.
"""
return 1e6 * scaling.mul(energy_totals[col], level=0).droplevel(level=0, axis=1)
return (
1e6
/ nhours
* scaling.mul(energy_totals[col], level=0).droplevel(level=0, axis=1)
)


def add_residential(n, costs):
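The new nhours argument is the unit fix behind most of this file's changes: energy_totals holds annual totals (apparently in TWh, which the 1e6 factor converts to MWh), while p_set must be an average power in MW over the weighted snapshots, so the annual figure is additionally divided by the summed snapshot weightings. A worked sketch of the arithmetic, with an assumed annual total and hourly snapshots:

# Illustrative arithmetic only (assumed units: annual total in TWh, p_set in MW).
annual_total_twh = 8.76  # hypothetical "services electricity" total for one country
nhours = 8760.0          # n.snapshot_weightings.generators.sum() for hourly, full-year snapshots
avg_power_mw = 1e6 * annual_total_twh / nhours
print(avg_power_mw)      # 1000.0 MW, i.e. a constant 1 GW before the per-unit shape is applied
# With e.g. 3-hourly snapshots the weightings are 3.0 each, so their sum is still
# 8760 and the conversion stays consistent.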
@@ -2483,33 +2492,19 @@ def add_residential(n, costs):
# heat_demand_index=n.loads_t.p.filter(like='residential').filter(like='heat').dropna(axis=1).index
# oil_res_index=n.loads_t.p.filter(like='residential').filter(like='oil').dropna(axis=1).index

nhours = n.snapshot_weightings.generators.sum()

heat_ind = (
n.loads_t.p_set.filter(like="residential")
.filter(like="heat")
.dropna(axis=1)
.columns
)
oil_ind = (
n.loads_t.p_set.filter(like="residential")
.filter(like="oil")
.dropna(axis=1)
.columns
)
bio_ind = (
n.loads_t.p_set.filter(like="residential")
.filter(like="biomass")
.dropna(axis=1)
.columns
)

gas_ind = (
n.loads_t.p_set.filter(like="residential")
.filter(like="gas")
.dropna(axis=1)
.columns
)

heat_shape_raw = normalize_by_country(n.loads_t.p_set[heat_ind])
heat_shape = heat_shape_raw.rename(
columns=n.loads.bus.map(n.buses.location), level=1
)
heat_shape = heat_shape.T.groupby(level=[0, 1]).sum().T

n.loads_t.p_set[heat_ind] = 1e6 * heat_shape_raw.mul(
energy_totals["total residential space"]
@@ -2518,39 +2513,44 @@
- energy_totals["residential heat oil"]
- energy_totals["residential heat gas"],
level=0,
).droplevel(level=0, axis=1)

heat_shape = group_by_node(heat_shape_raw.droplevel(0, axis=True), multiindex=True)
).droplevel(level=0, axis=1).div(nhours)

heat_oil_demand = p_set_from_scaling(
"residential heat oil", heat_shape, energy_totals
"residential heat oil", heat_shape, energy_totals, nhours
)

heat_biomass_demand = p_set_from_scaling(
"residential heat biomass", heat_shape, energy_totals
"residential heat biomass", heat_shape, energy_totals, nhours
)

heat_gas_demand = p_set_from_scaling(
"residential heat gas", heat_shape, energy_totals
"residential heat gas", heat_shape, energy_totals, nhours
)

res_index = spatial.nodes.intersection(n.loads_t.p_set.columns)
profile_residential = normalize_and_group(
n.loads_t.p_set[res_index], multiindex=True
).fillna(0)
profile_residential_raw = normalize_by_country(n.loads_t.p_set[res_index])
profile_residential = profile_residential_raw.rename(
columns=n.loads.bus.map(n.buses.location), level=1
)
profile_residential = profile_residential.T.groupby(level=[0, 1]).sum().T

p_set_oil = (
p_set_from_scaling("residential oil", profile_residential, energy_totals)
p_set_from_scaling(
"residential oil", profile_residential, energy_totals, nhours
)
+ heat_oil_demand
)

p_set_biomass = (
p_set_from_scaling("residential biomass", profile_residential, energy_totals)
p_set_from_scaling(
"residential biomass", profile_residential, energy_totals, nhours
)
+ heat_biomass_demand
)

p_set_gas = (
p_set_from_scaling("residential gas", profile_residential, energy_totals)
p_set_from_scaling(
"residential gas", profile_residential, energy_totals, nhours
)
+ heat_gas_demand
)

@@ -2618,15 +2618,17 @@ def add_residential(n, costs):
n.loads_t.p_set.filter(like=country)[heat_buses].sum().sum(),
)
n.loads_t.p_set.loc[:, heat_buses] = np.where(
~np.isnan(safe_division), safe_division * rem_heat_demand * 1e6, 0.0
~np.isnan(safe_division),
safe_division * rem_heat_demand * 1e6 / nhours,
0.0,
)

# Revise residential electricity demand
buses = n.buses[n.buses.carrier == "AC"].index.intersection(n.loads_t.p_set.columns)

profile_pu = normalize_by_country(n.loads_t.p_set[buses]).fillna(0)
n.loads_t.p_set.loc[:, buses] = p_set_from_scaling(
"electricity residential", profile_pu, energy_totals
"electricity residential", profile_pu, energy_totals, nhours
)


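The reworked heat_shape and profile_residential blocks above all follow the same pandas pattern: the load columns carry a (country, load) MultiIndex, the second level is relabelled to the bus location each load sits on, and duplicate (country, node) columns are then summed. A self-contained sketch with hypothetical names:

# Illustrative only, hypothetical data: aggregate MultiIndex columns per (country, node),
# mirroring the rename/groupby pattern used in add_residential above.
import pandas as pd

cols = pd.MultiIndex.from_tuples(
    [("NG", "load_a"), ("NG", "load_b"), ("BJ", "load_c")], names=["country", "load"]
)
df = pd.DataFrame([[1.0, 2.0, 3.0]], columns=cols)

load_to_node = {"load_a": "NG_1", "load_b": "NG_1", "load_c": "BJ_1"}  # assumed mapping

df = df.rename(columns=load_to_node, level=1)  # relabel level 1: loads -> network nodes
df = df.T.groupby(level=[0, 1]).sum().T        # collapse duplicate (country, node) columns
print(df)  # ("NG", "NG_1") == 3.0 (1.0 + 2.0), ("BJ", "BJ_1") == 3.0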
11 changes: 3 additions & 8 deletions scripts/solve_network.py
@@ -575,11 +575,6 @@ def add_h2_network_cap(n, cap):
return
h2_network_cap = get_var(n, "Link", "p_nom")
subset_index = h2_network.index.intersection(h2_network_cap.index)
diff_index = h2_network_cap.index.difference(subset_index)
if len(diff_index) > 0:
logger.warning(
f"Impossible to set H2 cap for the following links: {diff_index}"
)
lhs = linexpr(
(h2_network.loc[subset_index, "length"], h2_network_cap[subset_index])
).sum()
@@ -1012,13 +1007,13 @@ def solve_network(n, config, solving={}, opts="", **kwargs):
opts = snakemake.wildcards.opts.split("-")
solving = snakemake.params.solving

is_sector_coupled = "sopts" in snakemake.wildcards
is_sector_coupled = "sopts" in snakemake.wildcards.keys()

if is_sector_coupled:
overrides = override_component_attrs(snakemake.input.overrides)
n = pypsa.Network(snakemake.input[0], override_component_attrs=overrides)
n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
else:
n = pypsa.Network(snakemake.input[0])
n = pypsa.Network(snakemake.input.network)

if snakemake.params.augmented_line_connection.get("add_to_snakefile"):
n.lines.loc[n.lines.index.str.contains("new"), "s_nom_min"] = (
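These last two hunks are the commit's headline fix. Snakemake's wildcards object is list-like (a sequence of wildcard values with named attributes), so the earlier bare membership test inspected the values; checking .keys() tests for the sopts wildcard by name. Combined with the named input from the Snakefile, the branch now reads roughly as below (sketch only; override_component_attrs is the helper already used by the repository's scripts):

# Illustrative sketch, not the full script.
import pypsa

is_sector_coupled = "sopts" in snakemake.wildcards.keys()  # wildcard name, not value

if is_sector_coupled:
    overrides = override_component_attrs(snakemake.input.overrides)
    n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
else:
    n = pypsa.Network(snakemake.input.network)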
4 changes: 2 additions & 2 deletions test/config.test1.yaml
@@ -14,7 +14,7 @@ scenario:
ll:
- "c1"

countries: ['NG', 'BJ']
countries: ["NG", "BJ"]


electricity:
@@ -50,4 +50,4 @@ snapshots:

solving:
solver:
name: cbc
name: gurobi
2 changes: 1 addition & 1 deletion test/config.test_myopic.yaml
@@ -49,7 +49,7 @@ policy_config:
cluster_options:
alternative_clustering: true

countries: ['NG', 'BJ']
countries: ['BJ', 'NG']

demand_data:
update_data: true # if true, the workflow downloads the energy balances data saved in data/demand/unsd/data again. Turn on for the first run.