geothermal_district_heating()   A
last analyzed

Complexity

Conditions 2

Size

Total Lines 40
Code Lines 16

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 16
dl 0
loc 40
rs 9.6
c 0
b 0
f 0
cc 2
nop 1
1
"""The central module containing all code dealing with importing data from
2
the pysa-eur-sec scenario parameter creation
3
"""
4
5
from pathlib import Path
6
from urllib.request import urlretrieve
7
import json
8
import shutil
9
10
from shapely.geometry import LineString
11
import geopandas as gpd
12
import importlib_resources as resources
13
import numpy as np
14
import pandas as pd
15
import pypsa
16
import requests
17
import yaml
18
19
from egon.data import __path__, config, db, logger
20
from egon.data.datasets import Dataset
21
from egon.data.datasets.scenario_parameters import get_sector_parameters
22
from egon.data.datasets.scenario_parameters.parameters import (
23
    annualize_capital_costs,
24
)
25
import egon.data.config
26
import egon.data.subprocess as subproc
27
28
29
class PreparePypsaEur(Dataset):
    """Dataset running the preparation stage of the pypsa-eur workflow.

    Executes :func:`download` (clone repository and fetch input data)
    followed by :func:`prepare_network` (snakemake ``prepare`` target).
    """

    def __init__(self, dependencies):
        super().__init__(
            name="PreparePypsaEur",
            version="0.0.42",
            dependencies=dependencies,
            tasks=(download, prepare_network),
        )
40
41
42
class RunPypsaEur(Dataset):
    """Dataset solving the pypsa-eur scenario and importing its results.

    NOTE(review): the registered dataset name is ``SolvePypsaEur``,
    which differs from the class name — presumably kept for backwards
    compatibility of the pipeline; confirm before renaming either.
    """

    def __init__(self, dependencies):
        super().__init__(
            name="SolvePypsaEur",
            version="0.0.41",
            dependencies=dependencies,
            tasks=(
                prepare_network_2,
                execute,
                solve_network,
                clean_database,
                electrical_neighbours_egon100,
                # Dropped until we decided how we deal with the H2 grid
                # overwrite_H2_pipeline_share,
            ),
        )
58
59
60
def download():
    """Clone pypsa-eur at a pinned commit and fetch its external inputs.

    Creates the working directory ``./run-pypsa-eur``, clones the
    pypsa-eur repository, checks out a fixed commit for reproducibility,
    copies the egon-data specific configuration and helper files into
    it, and downloads auxiliary datasets (era5 cutout, natura raster,
    ship density, ENSPRESO biomass, Global Energy Monitor trackers)
    that the regular snakemake workflow cannot retrieve reliably.

    The whole task is skipped when ``--run-pypsa-eur`` is disabled in
    the egon-data settings.

    Returns
    -------
    None
    """
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"

    if not config.settings()["egon-data"]["--run-pypsa-eur"]:
        print("Pypsa-eur is not executed due to the settings of egon-data")
        return

    if not pypsa_eur_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "master",
                "https://github.com/PyPSA/pypsa-eur.git",
                pypsa_eur_repos,
            ]
        )

        # Pin the checkout to a known-good commit so the run is
        # reproducible independent of upstream master.
        subproc.run(
            [
                "git",
                "checkout",
                "2119f4cee05c256509f48d4e9fe0d8fd9e9e3632",
            ],
            cwd=pypsa_eur_repos,
        )

        # NOTE: a block editing envs/environment.yaml (gurobipy pin,
        # python<3.12, geopandas<1) used to live here; it was disabled
        # and has been removed — see version control history if the
        # environment manipulation is needed again.

        # Copy config file for egon-data to pypsa-eur directory
        shutil.copy(
            Path(
                __path__[0], "datasets", "pypsaeur", "config_prepare.yaml"
            ),
            pypsa_eur_repos / "config" / "config.yaml",
        )

        # Copy custom_extra_functionality.py for egon-data to pypsa-eur
        shutil.copy(
            Path(
                __path__[0],
                "datasets",
                "pypsaeur",
                "custom_extra_functionality.py",
            ),
            pypsa_eur_repos / "data",
        )

        # Snakefile orchestrating the egon-data specific pypsa-eur runs
        with open(filepath / "Snakefile", "w") as snakefile:
            snakefile.write(
                resources.read_text(
                    "egon.data.datasets.pypsaeur", "Snakefile"
                )
            )

    # Copy era5 weather data to folder for pypsaeur; the copy is only
    # done once, when the cutouts folder does not exist yet.
    era5_pypsaeur_path = pypsa_eur_repos / "cutouts"

    if not era5_pypsaeur_path.exists():
        era5_pypsaeur_path.mkdir(parents=True, exist_ok=True)
        copy_from = config.datasets()["era5_weather_data"]["targets"][
            "weather_data"
        ]["path"]
        filename = "europe-2011-era5.nc"
        shutil.copy(
            copy_from + "/" + filename, era5_pypsaeur_path / filename
        )

    # Workaround to download natura, shipdensity and globalenergymonitor
    # data, which is not working in the regular snakemake workflow.
    # The same files are downloaded from the same directory as in
    # pypsa-eur version 0.10 and stored in the folders from pypsa-eur.
    # mkdir(exist_ok=True) is idempotent, so no existence checks are
    # needed (the original code also duplicated the 'gem' check).
    resources_dir = pypsa_eur_repos / "resources"
    resources_dir.mkdir(parents=True, exist_ok=True)
    urlretrieve(
        "https://zenodo.org/record/4706686/files/natura.tiff",
        resources_dir / "natura.tiff",
    )

    data_dir = pypsa_eur_repos / "data"
    data_dir.mkdir(parents=True, exist_ok=True)
    urlretrieve(
        "https://zenodo.org/record/13757228/files/shipdensity_global.zip",
        data_dir / "shipdensity_global.zip",
    )

    # NOTE(review): the ENSPRESO file is fetched from zenodo record
    # 10356004 but stored under a 13757228 path — presumably this is
    # where the pinned pypsa-eur version expects it; confirm when
    # bumping the pypsa-eur commit.
    enspreso_dir = (
        pypsa_eur_repos / "zenodo.org" / "records" / "13757228" / "files"
    )
    enspreso_dir.mkdir(parents=True, exist_ok=True)
    urlretrieve(
        "https://zenodo.org/records/10356004/files/ENSPRESO_BIOMASS.xlsx",
        enspreso_dir / "ENSPRESO_BIOMASS.xlsx",
    )

    gem_dir = data_dir / "gem"
    gem_dir.mkdir(parents=True, exist_ok=True)

    # Global Energy Monitor gas tracker
    r = requests.get(
        "https://tubcloud.tu-berlin.de/s/LMBJQCsN6Ez5cN2/download/"
        "Europe-Gas-Tracker-2024-05.xlsx"
    )
    with open(
        gem_dir / "Europe-Gas-Tracker-2024-05.xlsx", "wb"
    ) as outfile:
        outfile.write(r.content)

    # Global Energy Monitor steel plant tracker
    r = requests.get(
        "https://tubcloud.tu-berlin.de/s/Aqebo3rrQZWKGsG/download/"
        "Global-Steel-Plant-Tracker-April-2024-Standard-Copy-V1.xlsx"
    )
    with open(
        gem_dir
        / "Global-Steel-Plant-Tracker-April-2024-Standard-Copy-V1.xlsx",
        "wb",
    ) as outfile:
        outfile.write(r.content)
248
249
250
def prepare_network():
    """Run the pypsa-eur ``prepare`` snakemake target and back up results.

    Executes snakemake inside ``./run-pypsa-eur`` to build the prepared
    networks, runs :func:`execute` for the egon-data specific
    manipulations, and copies the resulting pre-networks into a backup
    folder (``prenetwork_post-manipulate_pre-solve``) so the state
    before solving is preserved.

    The task is skipped when ``--run-pypsa-eur`` is disabled in the
    egon-data settings.

    Returns
    -------
    None
    """
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur"

    if not config.settings()["egon-data"]["--run-pypsa-eur"]:
        print("Pypsa-eur is not executed due to the settings of egon-data")
        return

    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            filepath,
            "--snakefile",
            filepath / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "--cores",
            "8",
            "prepare",
        ]
    )
    execute()

    path = filepath / "pypsa-eur" / "results" / "prenetworks"

    # Keep a copy of the manipulated but not yet solved networks
    path_2 = path / "prenetwork_post-manipulate_pre-solve"
    path_2.mkdir(parents=True, exist_ok=True)

    # Path(...) join for consistency with the rest of the module
    # (was a plain string concatenation before).
    with open(
        Path(__path__[0], "datasets", "pypsaeur", "config_prepare.yaml"),
        "r",
    ) as stream:
        data_config = yaml.safe_load(stream)

    scenario = data_config["scenario"]
    # Iterate the horizons directly instead of indexing via range().
    for horizon in scenario["planning_horizons"]:
        # File name pattern produced by pypsa-eur for each horizon
        nc_file = (
            f"base_s_{scenario['clusters'][0]}"
            f"_l{scenario['ll'][0]}"
            f"_{scenario['opts'][0]}"
            f"_{scenario['sector_opts'][0]}"
            f"_{horizon}.nc"
        )

        shutil.copy(Path(path, nc_file), path_2)
295
296
297
def prepare_network_2():
    """Switch pypsa-eur to the solve configuration and rerun ``prepare``.

    Overwrites the pypsa-eur config with ``config_solve.yaml`` and
    executes the snakemake ``prepare`` target again so the workflow
    state matches the solve configuration. Skipped when
    ``--run-pypsa-eur`` is disabled in the egon-data settings.

    Returns
    -------
    None
    """
    base_dir = Path(".") / "run-pypsa-eur"

    if not config.settings()["egon-data"]["--run-pypsa-eur"]:
        print("Pypsa-eur is not executed due to the settings of egon-data")
        return

    # Replace the prepare config by the solve config
    shutil.copy(
        Path(__path__[0], "datasets", "pypsaeur", "config_solve.yaml"),
        base_dir / "pypsa-eur" / "config" / "config.yaml",
    )

    snakemake_call = [
        "snakemake",
        "-j1",
        "--directory",
        base_dir,
        "--snakefile",
        base_dir / "Snakefile",
        "--use-conda",
        "--conda-frontend=conda",
        "--cores",
        "8",
        "prepare",
    ]
    subproc.run(snakemake_call)
324
325
326
def solve_network():
    """Run the pypsa-eur ``solve`` and ``summary`` snakemake targets.

    Solves the prepared networks, applies the biomass post-processing
    for 2045 in between, and finally builds the pypsa-eur summary.
    Skipped when ``--run-pypsa-eur`` is disabled in the egon-data
    settings.

    Returns
    -------
    None
    """
    workdir = Path(".") / "run-pypsa-eur"

    if not config.settings()["egon-data"]["--run-pypsa-eur"]:
        print("Pypsa-eur is not executed due to the settings of egon-data")
        return

    # Solve the networks
    subproc.run(
        [
            "snakemake",
            "-j1",
            "--cores",
            "8",
            "--directory",
            workdir,
            "--snakefile",
            workdir / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "solve",
        ]
    )

    # Biomass post-processing has to run between solving and the
    # summary step.
    postprocessing_biomass_2045()

    # Build the summary of the solved networks
    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            workdir,
            "--snakefile",
            workdir / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "summary",
        ]
    )
364
365
366 View Code Duplication
def read_network(planning_horizon=3):
    """Load a solved pypsa-eur post-network.

    Parameters
    ----------
    planning_horizon : int
        Index into the ``planning_horizons`` list of the solve
        configuration selecting which horizon's network to load.

    Returns
    -------
    pypsa.Network
        The solved network — from the local pypsa-eur run when
        ``--run-pypsa-eur`` is enabled, otherwise a fixed file from
        the data bundle.
    """
    if config.settings()["egon-data"]["--run-pypsa-eur"]:
        with open(
            __path__[0] + "/datasets/pypsaeur/config_solve.yaml", "r"
        ) as stream:
            data_config = yaml.safe_load(stream)

        scenario = data_config["scenario"]
        horizon = scenario["planning_horizons"][planning_horizon]

        # File name pattern produced by pypsa-eur
        nc_name = (
            f"base_s_{scenario['clusters'][0]}"
            f"_l{scenario['ll'][0]}"
            f"_{scenario['opts'][0]}"
            f"_{scenario['sector_opts'][0]}"
            f"_{horizon}.nc"
        )

        target_file = (
            Path(".")
            / "run-pypsa-eur"
            / "pypsa-eur"
            / "results"
            / data_config["run"]["name"]
            / "postnetworks"
            / nc_name
        )
    else:
        target_file = Path(
            ".",
            "data_bundle_egon_data",
            "pypsa_eur",
            "postnetworks",
            "base_s_39_lc1.25__cb40ex0-T-H-I-B-solar+p3-dist1_2045.nc",
        )

    return pypsa.Network(target_file)
397
398
399
def clean_database():
    """Remove all components abroad for eGon100RE of the database

    Remove all components abroad and their associated time series of
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    # Single-port components share the table naming scheme
    # grid.egon_etrago_<comp>(_timeseries) with a <comp>_id key column.
    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    # The time-series rows are removed first, then the component rows
    # they reference; both are restricted to non-German buses.
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE {comp + "_id"} IN (
                SELECT {comp + "_id"} FROM {"grid.egon_etrago_" + comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    # Branch components connect two buses (bus0/bus1); a row is only
    # deleted when BOTH ends are foreign and neither end belongs to a
    # bus kept by osmtgmod (osmtgmod_results.bus_data).
    comp_2_ports = [
        "line",
        "link",
    ]

    for comp, id in zip(comp_2_ports, ["line_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE scn_name = '{scn_name}'
            AND {id} IN (
                SELECT {id} FROM {"grid.egon_etrago_" + comp}
            WHERE "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            );


            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE scn_name = '{scn_name}'
            AND "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            ;"""
        )

    # Finally drop the foreign non-AC buses themselves; foreign AC
    # buses are kept (they are handled by other datasets).
    db.execute_sql(
        f"""
        DELETE FROM grid.egon_etrago_bus
        WHERE scn_name = '{scn_name}'
        AND country <> 'DE'
        AND carrier <> 'AC'
        """
    )
488
489
490
def electrical_neighbours_egon100():
    """Run the neighbour reduction when the eGon100RE scenario is active.

    Guard-clause wrapper: skips with a message when 'eGon100RE' is not
    among the scenarios configured in the egon-data settings.
    """
    active_scenarios = egon.data.config.settings()["egon-data"]["--scenarios"]

    if "eGon100RE" not in active_scenarios:
        print(
            "eGon100RE is not in the list of created scenarios, this task is skipped."
        )
        return

    neighbor_reduction()
498
499
500
def combine_decentral_and_rural_heat(network_solved, network_prepared):
    """Merge foreign 'urban decentral' heat components into 'rural' ones.

    For all non-German buses, links and stores of the solved network,
    urban-decentral heat components are either added onto their rural
    counterpart (when one exists) and removed, or re-labelled as rural.
    Urban-decentral loads of the prepared network are folded into the
    matching rural load time series and then dropped.

    Parameters
    ----------
    network_solved : pypsa.Network
        Solved pypsa-eur network; modified in place.
    network_prepared : pypsa.Network
        Prepared (pre-solve) pypsa-eur network; modified in place.

    Returns
    -------
    (network_prepared, network_solved) : tuple
        The two networks after merging.
    """

    for comp in network_solved.iterate_components():

        if comp.name in ["Bus", "Link", "Store"]:
            # Filtered views of the component table, split by carrier.
            urban_decentral = comp.df[
                comp.df.carrier.str.contains("urban decentral")
            ]
            rural = comp.df[comp.df.carrier.str.contains("rural")]
            for i, row in urban_decentral.iterrows():
                # Only foreign components (index without 'DE') are merged.
                if not "DE" in i:
                    if comp.name in ["Bus"]:
                        network_solved.remove("Bus", i)
                    if comp.name in ["Link", "Generator"]:
                        if (
                            i.replace("urban decentral", "rural")
                            in rural.index
                        ):
                            # NOTE(review): `rural` is a filtered copy of
                            # comp.df, so these += writes likely do not
                            # propagate back to the network (pandas
                            # chained-assignment) — verify intent.
                            rural.loc[
                                i.replace("urban decentral", "rural"),
                                "p_nom_opt",
                            ] += urban_decentral.loc[i, "p_nom_opt"]
                            rural.loc[
                                i.replace("urban decentral", "rural"), "p_nom"
                            ] += urban_decentral.loc[i, "p_nom"]
                            network_solved.remove(comp.name, i)
                        else:
                            # No rural counterpart: re-label this link
                            # as rural instead of merging it.
                            print(i)
                            comp.df.loc[i, "bus0"] = comp.df.loc[
                                i, "bus0"
                            ].replace("urban decentral", "rural")
                            comp.df.loc[i, "bus1"] = comp.df.loc[
                                i, "bus1"
                            ].replace("urban decentral", "rural")
                            comp.df.loc[i, "carrier"] = comp.df.loc[
                                i, "carrier"
                            ].replace("urban decentral", "rural")
                    if comp.name in ["Store"]:
                        if (
                            i.replace("urban decentral", "rural")
                            in rural.index
                        ):
                            # NOTE(review): same chained-assignment caveat
                            # as for links above.
                            rural.loc[
                                i.replace("urban decentral", "rural"),
                                "e_nom_opt",
                            ] += urban_decentral.loc[i, "e_nom_opt"]
                            rural.loc[
                                i.replace("urban decentral", "rural"), "e_nom"
                            ] += urban_decentral.loc[i, "e_nom"]
                            network_solved.remove(comp.name, i)

                        else:
                            # No rural counterpart: rebind the store to
                            # the rural bus and carrier.
                            print(i)
                            network_solved.stores.loc[i, "bus"] = (
                                network_solved.stores.loc[i, "bus"].replace(
                                    "urban decentral", "rural"
                                )
                            )
                            network_solved.stores.loc[i, "carrier"] = (
                                "rural water tanks"
                            )

    # Fold urban-decentral load time series of the prepared network
    # into the corresponding rural loads, then drop them.
    urban_decentral_loads = network_prepared.loads[
        network_prepared.loads.carrier.str.contains("urban decentral")
    ]

    for i, row in urban_decentral_loads.iterrows():
        if i in network_prepared.loads_t.p_set.columns:
            network_prepared.loads_t.p_set[
                i.replace("urban decentral", "rural")
            ] += network_prepared.loads_t.p_set[i]
    network_prepared.mremove("Load", urban_decentral_loads.index)

    return network_prepared, network_solved
574
575
576
def neighbor_reduction():
577
    network_solved = read_network()
578
    network_prepared = prepared_network(planning_horizon="2045")
579
580
    # network.links.drop("pipe_retrofit", axis="columns", inplace=True)
581
582
    wanted_countries = [
583
        "DE",
584
        "AT",
585
        "CH",
586
        "CZ",
587
        "PL",
588
        "SE",
589
        "NO",
590
        "DK",
591
        "GB",
592
        "NL",
593
        "BE",
594
        "FR",
595
        "LU",
596
    ]
597
598
    foreign_buses = network_solved.buses[
599
        (~network_solved.buses.index.str.contains("|".join(wanted_countries)))
600
        | (network_solved.buses.index.str.contains("FR6"))
601
    ]
602
    network_solved.buses = network_solved.buses.drop(
603
        network_solved.buses.loc[foreign_buses.index].index
604
    )
605
606
    # Add H2 demand of Fischer-Tropsch process and methanolisation
607
    # to industrial H2 demands
608
    industrial_hydrogen = network_prepared.loads.loc[
609
        network_prepared.loads.carrier == "H2 for industry"
610
    ]
611
    fischer_tropsch = (
612
        network_solved.links_t.p0[
613
            network_solved.links.loc[
614
                network_solved.links.carrier == "Fischer-Tropsch"
615
            ].index
616
        ]
617
        .mul(network_solved.snapshot_weightings.generators, axis=0)
618
        .sum()
619
    )
620
    methanolisation = (
621
        network_solved.links_t.p0[
622
            network_solved.links.loc[
623
                network_solved.links.carrier == "methanolisation"
624
            ].index
625
        ]
626
        .mul(network_solved.snapshot_weightings.generators, axis=0)
627
        .sum()
628
    )
629
    for i, row in industrial_hydrogen.iterrows():
630
        network_prepared.loads.loc[i, "p_set"] += (
631
            fischer_tropsch[
632
                fischer_tropsch.index.str.startswith(row.bus[:5])
633
            ].sum()
634
            / 8760
635
        )
636
        network_prepared.loads.loc[i, "p_set"] += (
637
            methanolisation[
638
                methanolisation.index.str.startswith(row.bus[:5])
639
            ].sum()
640
            / 8760
641
        )
642
    # drop foreign lines and links from the 2nd row
643
644
    network_solved.lines = network_solved.lines.drop(
645
        network_solved.lines[
646
            (
647
                network_solved.lines["bus0"].isin(network_solved.buses.index)
648
                == False
649
            )
650
            & (
651
                network_solved.lines["bus1"].isin(network_solved.buses.index)
652
                == False
653
            )
654
        ].index
655
    )
656
657
    # select all lines which have at bus1 the bus which is kept
658
    lines_cb_1 = network_solved.lines[
659
        (
660
            network_solved.lines["bus0"].isin(network_solved.buses.index)
661
            == False
662
        )
663
    ]
664
665
    # create a load at bus1 with the line's hourly loading
666
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):
667
668
        # Copy loading of lines into hourly resolution
669
        pset = pd.Series(
670
            index=network_prepared.snapshots,
671
            data=network_solved.lines_t.p1[k].resample("H").ffill(),
672
        )
673
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
674
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]
675
676
        # Loads are all imported from the prepared network in the end
677
        network_prepared.add(
678
            "Load",
679
            "slack_fix " + i + " " + k,
680
            bus=i,
681
            p_set=pset,
682
            carrier=lines_cb_1.loc[k, "carrier"],
683
        )
684
685
    # select all lines which have at bus0 the bus which is kept
686
    lines_cb_0 = network_solved.lines[
687
        (
688
            network_solved.lines["bus1"].isin(network_solved.buses.index)
689
            == False
690
        )
691
    ]
692
693
    # create a load at bus0 with the line's hourly loading
694
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
695
        # Copy loading of lines into hourly resolution
696
        pset = pd.Series(
697
            index=network_prepared.snapshots,
698
            data=network_solved.lines_t.p0[k].resample("H").ffill(),
699
        )
700
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
701
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]
702
703
        network_prepared.add(
704
            "Load",
705
            "slack_fix " + i + " " + k,
706
            bus=i,
707
            p_set=pset,
708
            carrier=lines_cb_0.loc[k, "carrier"],
709
        )
710
711
    # do the same for links
712
    network_solved.mremove(
713
        "Link",
714
        network_solved.links[
715
            (~network_solved.links.bus0.isin(network_solved.buses.index))
716
            | (~network_solved.links.bus1.isin(network_solved.buses.index))
717
        ].index,
718
    )
719
720
    # select all links which have at bus1 the bus which is kept
721
    links_cb_1 = network_solved.links[
722
        (
723
            network_solved.links["bus0"].isin(network_solved.buses.index)
724
            == False
725
        )
726
    ]
727
728
    # create a load at bus1 with the link's hourly loading
729
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
730
        pset = pd.Series(
731
            index=network_prepared.snapshots,
732
            data=network_solved.links_t.p1[k].resample("H").ffill(),
733
        )
734
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
735
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]
736
737
        network_prepared.add(
738
            "Load",
739
            "slack_fix_links " + i + " " + k,
740
            bus=i,
741
            p_set=pset,
742
            carrier=links_cb_1.loc[k, "carrier"],
743
        )
744
745
    # select all links which have at bus0 the bus which is kept
746
    links_cb_0 = network_solved.links[
747
        (
748
            network_solved.links["bus1"].isin(network_solved.buses.index)
749
            == False
750
        )
751
    ]
752
753
    # create a load at bus0 with the link's hourly loading
754
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
755
        pset = pd.Series(
756
            index=network_prepared.snapshots,
757
            data=network_solved.links_t.p0[k].resample("H").ffill(),
758
        )
759
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
760
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]
761
762
        network_prepared.add(
763
            "Load",
764
            "slack_fix_links " + i + " " + k,
765
            bus=i,
766
            p_set=pset,
767
            carrier=links_cb_0.carrier[k],
768
        )
769
770
    # drop remaining foreign components
771
    for comp in network_solved.iterate_components():
772
        if "bus0" in comp.df.columns:
773
            network_solved.mremove(
774
                comp.name,
775
                comp.df[~comp.df.bus0.isin(network_solved.buses.index)].index,
776
            )
777
            network_solved.mremove(
778
                comp.name,
779
                comp.df[~comp.df.bus1.isin(network_solved.buses.index)].index,
780
            )
781
        elif "bus" in comp.df.columns:
782
            network_solved.mremove(
783
                comp.name,
784
                comp.df[~comp.df.bus.isin(network_solved.buses.index)].index,
785
            )
786
787
    # Combine urban decentral and rural heat
788
    network_prepared, network_solved = combine_decentral_and_rural_heat(
789
        network_solved, network_prepared
790
    )
791
792
    # writing components of neighboring countries to etrago tables
793
794
    # Set country tag for all buses
795
    network_solved.buses.country = network_solved.buses.index.str[:2]
796
    neighbors = network_solved.buses[network_solved.buses.country != "DE"]
797
798
    neighbors["new_index"] = (
799
        db.next_etrago_id("bus") + neighbors.reset_index().index
800
    )
801
802
    # Use index of AC buses created by electrical_neigbors
803
    foreign_ac_buses = db.select_dataframe(
804
        """
805
        SELECT * FROM grid.egon_etrago_bus
806
        WHERE carrier = 'AC' AND v_nom = 380
807
        AND country!= 'DE' AND scn_name ='eGon100RE'
808
        AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data)
809
        """
810
    )
811
    buses_with_defined_id = neighbors[
812
        (neighbors.carrier == "AC")
813
        & (neighbors.country.isin(foreign_ac_buses.country.values))
814
    ].index
815
    neighbors.loc[buses_with_defined_id, "new_index"] = (
816
        foreign_ac_buses.set_index("x")
817
        .loc[neighbors.loc[buses_with_defined_id, "x"]]
818
        .bus_id.values
819
    )
820
821
    # lines, the foreign crossborder lines
822
    # (without crossborder lines to Germany!)
823
824
    neighbor_lines = network_solved.lines[
825
        network_solved.lines.bus0.isin(neighbors.index)
826
        & network_solved.lines.bus1.isin(neighbors.index)
827
    ]
828
    if not network_solved.lines_t["s_max_pu"].empty:
829
        neighbor_lines_t = network_prepared.lines_t["s_max_pu"][
830
            neighbor_lines.index
831
        ]
832
833
    neighbor_lines.reset_index(inplace=True)
834
    neighbor_lines.bus0 = (
835
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
836
    )
837
    neighbor_lines.bus1 = (
838
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
839
    )
840
    neighbor_lines.index += db.next_etrago_id("line")
841
842
    if not network_solved.lines_t["s_max_pu"].empty:
843
        for i in neighbor_lines_t.columns:
0 ignored issues
show
introduced by
The variable neighbor_lines_t does not seem to be defined in case BooleanNotNode on line 828 is False. Are you sure this can never be the case?
Loading history...
844
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
845
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)
846
847
    # links
848
    neighbor_links = network_solved.links[
849
        network_solved.links.bus0.isin(neighbors.index)
850
        & network_solved.links.bus1.isin(neighbors.index)
851
    ]
852
853
    neighbor_links.reset_index(inplace=True)
854
    neighbor_links.bus0 = (
855
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
856
    )
857
    neighbor_links.bus1 = (
858
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
859
    )
860
    neighbor_links.index += db.next_etrago_id("link")
861
862
    # generators
863
    neighbor_gens = network_solved.generators[
864
        network_solved.generators.bus.isin(neighbors.index)
865
    ]
866
    neighbor_gens_t = network_prepared.generators_t["p_max_pu"][
867
        neighbor_gens[
868
            neighbor_gens.index.isin(
869
                network_prepared.generators_t["p_max_pu"].columns
870
            )
871
        ].index
872
    ]
873
874
    gen_time = [
875
        "solar",
876
        "onwind",
877
        "solar rooftop",
878
        "offwind-ac",
879
        "offwind-dc",
880
        "solar-hsat",
881
        "urban central solar thermal",
882
        "rural solar thermal",
883
        "offwind-float",
884
    ]
885
886
    missing_gent = neighbor_gens[
887
        neighbor_gens["carrier"].isin(gen_time)
888
        & ~neighbor_gens.index.isin(neighbor_gens_t.columns)
889
    ].index
890
891
    gen_timeseries = network_prepared.generators_t["p_max_pu"].copy()
892
    for mgt in missing_gent:  # mgt: missing generator timeseries
893
        try:
894
            neighbor_gens_t[mgt] = gen_timeseries.loc[:, mgt[0:-5]]
895
        except:
896
            print(f"There are not timeseries for {mgt}")
897
898
    neighbor_gens.reset_index(inplace=True)
899
    neighbor_gens.bus = (
900
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
901
    )
902
    neighbor_gens.index += db.next_etrago_id("generator")
903
904
    for i in neighbor_gens_t.columns:
905
        new_index = neighbor_gens[neighbor_gens["Generator"] == i].index
906
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)
907
908
    # loads
909
    # imported from prenetwork in 1h-resolution
910
    neighbor_loads = network_prepared.loads[
911
        network_prepared.loads.bus.isin(neighbors.index)
912
    ]
913
    neighbor_loads_t_index = neighbor_loads.index[
914
        neighbor_loads.index.isin(network_prepared.loads_t.p_set.columns)
915
    ]
916
    neighbor_loads_t = network_prepared.loads_t["p_set"][
917
        neighbor_loads_t_index
918
    ]
919
920
    neighbor_loads.reset_index(inplace=True)
921
    neighbor_loads.bus = (
922
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
923
    )
924
    neighbor_loads.index += db.next_etrago_id("load")
925
926
    for i in neighbor_loads_t.columns:
927
        new_index = neighbor_loads[neighbor_loads["Load"] == i].index
928
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)
929
930
    # stores
931
    neighbor_stores = network_solved.stores[
932
        network_solved.stores.bus.isin(neighbors.index)
933
    ]
934
    neighbor_stores_t_index = neighbor_stores.index[
935
        neighbor_stores.index.isin(network_solved.stores_t.e_min_pu.columns)
936
    ]
937
    neighbor_stores_t = network_prepared.stores_t["e_min_pu"][
938
        neighbor_stores_t_index
939
    ]
940
941
    neighbor_stores.reset_index(inplace=True)
942
    neighbor_stores.bus = (
943
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
944
    )
945
    neighbor_stores.index += db.next_etrago_id("store")
946
947
    for i in neighbor_stores_t.columns:
948
        new_index = neighbor_stores[neighbor_stores["Store"] == i].index
949
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)
950
951
    # storage_units
952
    neighbor_storage = network_solved.storage_units[
953
        network_solved.storage_units.bus.isin(neighbors.index)
954
    ]
955
    neighbor_storage_t_index = neighbor_storage.index[
956
        neighbor_storage.index.isin(
957
            network_solved.storage_units_t.inflow.columns
958
        )
959
    ]
960
    neighbor_storage_t = network_prepared.storage_units_t["inflow"][
961
        neighbor_storage_t_index
962
    ]
963
964
    neighbor_storage.reset_index(inplace=True)
965
    neighbor_storage.bus = (
966
        neighbors.loc[neighbor_storage.bus, "new_index"]
967
        .reset_index()
968
        .new_index
969
    )
970
    neighbor_storage.index += db.next_etrago_id("storage")
971
972
    for i in neighbor_storage_t.columns:
973
        new_index = neighbor_storage[
974
            neighbor_storage["StorageUnit"] == i
975
        ].index
976
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)
977
978
    # Connect to local database
979
    engine = db.engine()
980
981
    neighbors["scn_name"] = "eGon100RE"
982
    neighbors.index = neighbors["new_index"]
983
984
    # Correct geometry for non AC buses
985
    carriers = set(neighbors.carrier.to_list())
986
    carriers = [e for e in carriers if e not in ("AC")]
987
    non_AC_neighbors = pd.DataFrame()
988
    for c in carriers:
989
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
990
            "location", drop=False
991
        )
992
        for i in ["x", "y"]:
993
            c_neighbors = c_neighbors.drop(i, axis=1)
994
        coordinates = neighbors[neighbors.carrier == "AC"][
995
            ["location", "x", "y"]
996
        ].set_index("location")
997
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
998
            "new_index", drop=False
999
        )
1000
        non_AC_neighbors = pd.concat([non_AC_neighbors, c_neighbors])
1001
1002
    neighbors = pd.concat(
1003
        [neighbors[neighbors.carrier == "AC"], non_AC_neighbors]
1004
    )
1005
1006
    for i in [
1007
        "new_index",
1008
        "control",
1009
        "generator",
1010
        "location",
1011
        "sub_network",
1012
        "unit",
1013
        "substation_lv",
1014
        "substation_off",
1015
    ]:
1016
        neighbors = neighbors.drop(i, axis=1)
1017
1018
    # Add geometry column
1019
    neighbors = (
1020
        gpd.GeoDataFrame(
1021
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
1022
        )
1023
        .rename_geometry("geom")
1024
        .set_crs(4326)
1025
    )
1026
1027
    # Unify carrier names
1028
    neighbors.carrier = neighbors.carrier.str.replace(" ", "_")
1029
    neighbors.carrier.replace(
1030
        {
1031
            "gas": "CH4",
1032
            "gas_for_industry": "CH4_for_industry",
1033
            "urban_central_heat": "central_heat",
1034
            "EV_battery": "Li_ion",
1035
            "urban_central_water_tanks": "central_heat_store",
1036
            "rural_water_tanks": "rural_heat_store",
1037
        },
1038
        inplace=True,
1039
    )
1040
1041
    neighbors[~neighbors.carrier.isin(["AC"])].to_postgis(
1042
        "egon_etrago_bus",
1043
        engine,
1044
        schema="grid",
1045
        if_exists="append",
1046
        index=True,
1047
        index_label="bus_id",
1048
    )
1049
1050
    # prepare and write neighboring crossborder lines to etrago tables
1051
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
1052
        neighbor_lines["scn_name"] = scn
1053
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
1054
            int
1055
        )
1056
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]
1057
1058
        for i in [
1059
            "Line",
1060
            "x_pu_eff",
1061
            "r_pu_eff",
1062
            "sub_network",
1063
            "x_pu",
1064
            "r_pu",
1065
            "g_pu",
1066
            "b_pu",
1067
            "s_nom_opt",
1068
            "i_nom",
1069
            "dc",
1070
        ]:
1071
            neighbor_lines = neighbor_lines.drop(i, axis=1)
1072
1073
        # Define geometry and add to lines dataframe as 'topo'
1074
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
1075
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
1076
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
1077
        gdf["geometry"] = gdf.apply(
1078
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
1079
        )
1080
1081
        neighbor_lines = (
1082
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
1083
            .rename_geometry("topo")
1084
            .set_crs(4326)
1085
        )
1086
1087
        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
1088
            "lifetime"
1089
        ]["ac_ehv_overhead_line"]
1090
1091
        neighbor_lines.to_postgis(
1092
            "egon_etrago_line",
1093
            engine,
1094
            schema="grid",
1095
            if_exists="append",
1096
            index=True,
1097
            index_label="line_id",
1098
        )
1099
1100
    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE")
1101
1102
    def links_to_etrago(neighbor_links, scn="eGon100RE", extendable=True):
1103
        """Prepare and write neighboring crossborder links to eTraGo table
1104
1105
        This function prepare the neighboring crossborder links
1106
        generated the PyPSA-eur-sec (p-e-s) run by:
1107
          * Delete the useless columns
1108
          * If extendable is false only (non default case):
1109
          * Replace p_nom = 0 with the p_nom_opt values (arising
1110
                from the p-e-s optimisation)
1111
              * Setting p_nom_extendable to false
1112
          * Add geometry to the links: 'geom' and 'topo' columns
1113
          * Change the name of the carriers to have the consistent in
1114
            eGon-data
1115
1116
        The function insert then the link to the eTraGo table and has
1117
        no return.
1118
1119
        Parameters
1120
        ----------
1121
        neighbor_links : pandas.DataFrame
1122
            Dataframe containing the neighboring crossborder links
1123
        scn : str
1124
            Name of the scenario
1125
        extendable : bool
1126
            Boolean expressing if the links should be extendable or not
1127
1128
        Returns
1129
        -------
1130
        None
1131
1132
        """
1133
        neighbor_links["scn_name"] = scn
1134
1135
        dropped_carriers = [
1136
            "Link",
1137
            "geometry",
1138
            "tags",
1139
            "under_construction",
1140
            "underground",
1141
            "underwater_fraction",
1142
            "bus2",
1143
            "bus3",
1144
            "bus4",
1145
            "efficiency2",
1146
            "efficiency3",
1147
            "efficiency4",
1148
            "lifetime",
1149
            "pipe_retrofit",
1150
            "committable",
1151
            "start_up_cost",
1152
            "shut_down_cost",
1153
            "min_up_time",
1154
            "min_down_time",
1155
            "up_time_before",
1156
            "down_time_before",
1157
            "ramp_limit_up",
1158
            "ramp_limit_down",
1159
            "ramp_limit_start_up",
1160
            "ramp_limit_shut_down",
1161
            "length_original",
1162
            "reversed",
1163
            "location",
1164
            "project_status",
1165
            "dc",
1166
            "voltage",
1167
        ]
1168
1169
        if extendable:
1170
            dropped_carriers.append("p_nom_opt")
1171
            neighbor_links = neighbor_links.drop(
1172
                columns=dropped_carriers,
1173
                errors="ignore",
1174
            )
1175
1176
        else:
1177
            dropped_carriers.append("p_nom")
1178
            dropped_carriers.append("p_nom_extendable")
1179
            neighbor_links = neighbor_links.drop(
1180
                columns=dropped_carriers,
1181
                errors="ignore",
1182
            )
1183
            neighbor_links = neighbor_links.rename(
1184
                columns={"p_nom_opt": "p_nom"}
1185
            )
1186
            neighbor_links["p_nom_extendable"] = False
1187
1188
        if neighbor_links.empty:
1189
            print("No links selected")
1190
            return
1191
1192
        # Define geometry and add to lines dataframe as 'topo'
1193
        gdf = gpd.GeoDataFrame(
1194
            index=neighbor_links.index,
1195
            data={
1196
                "geom_bus0": neighbors.loc[neighbor_links.bus0, "geom"].values,
1197
                "geom_bus1": neighbors.loc[neighbor_links.bus1, "geom"].values,
1198
            },
1199
        )
1200
1201
        gdf["geometry"] = gdf.apply(
1202
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
1203
        )
1204
1205
        neighbor_links = (
1206
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
1207
            .rename_geometry("topo")
1208
            .set_crs(4326)
1209
        )
1210
1211
        # Unify carrier names
1212
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")
1213
1214
        neighbor_links.carrier.replace(
1215
            {
1216
                "H2_Electrolysis": "power_to_H2",
1217
                "H2_Fuel_Cell": "H2_to_power",
1218
                "H2_pipeline_retrofitted": "H2_retrofit",
1219
                "SMR": "CH4_to_H2",
1220
                "Sabatier": "H2_to_CH4",
1221
                "gas_for_industry": "CH4_for_industry",
1222
                "gas_pipeline": "CH4",
1223
                "urban_central_gas_boiler": "central_gas_boiler",
1224
                "urban_central_resistive_heater": "central_resistive_heater",
1225
                "urban_central_water_tanks_charger": "central_heat_store_charger",
1226
                "urban_central_water_tanks_discharger": "central_heat_store_discharger",
1227
                "rural_water_tanks_charger": "rural_heat_store_charger",
1228
                "rural_water_tanks_discharger": "rural_heat_store_discharger",
1229
                "urban_central_gas_CHP": "central_gas_CHP",
1230
                "urban_central_air_heat_pump": "central_heat_pump",
1231
                "rural_ground_heat_pump": "rural_heat_pump",
1232
            },
1233
            inplace=True,
1234
        )
1235
1236
        H2_links = {
1237
            "H2_to_CH4": "H2_to_CH4",
1238
            "H2_to_power": "H2_to_power",
1239
            "power_to_H2": "power_to_H2_system",
1240
            "CH4_to_H2": "CH4_to_H2",
1241
        }
1242
1243
        for c in H2_links.keys():
1244
1245
            neighbor_links.loc[
1246
                (neighbor_links.carrier == c),
1247
                "lifetime",
1248
            ] = get_sector_parameters("gas", "eGon100RE")["lifetime"][
1249
                H2_links[c]
1250
            ]
1251
1252
        neighbor_links.to_postgis(
1253
            "egon_etrago_link",
1254
            engine,
1255
            schema="grid",
1256
            if_exists="append",
1257
            index=True,
1258
            index_label="link_id",
1259
        )
1260
1261
    extendable_links_carriers = [
1262
        "battery charger",
1263
        "battery discharger",
1264
        "home battery charger",
1265
        "home battery discharger",
1266
        "rural water tanks charger",
1267
        "rural water tanks discharger",
1268
        "urban central water tanks charger",
1269
        "urban central water tanks discharger",
1270
        "urban decentral water tanks charger",
1271
        "urban decentral water tanks discharger",
1272
        "H2 Electrolysis",
1273
        "H2 Fuel Cell",
1274
        "SMR",
1275
        "Sabatier",
1276
    ]
1277
1278
    # delete unwanted carriers for eTraGo
1279
    excluded_carriers = [
1280
        "gas for industry CC",
1281
        "SMR CC",
1282
        "DAC",
1283
    ]
1284
    neighbor_links = neighbor_links[
1285
        ~neighbor_links.carrier.isin(excluded_carriers)
1286
    ]
1287
1288
    # Combine CHP_CC and CHP
1289
    chp_cc = neighbor_links[
1290
        neighbor_links.carrier == "urban central gas CHP CC"
1291
    ]
1292
    for index, row in chp_cc.iterrows():
1293
        neighbor_links.loc[
1294
            neighbor_links.Link == row.Link.replace("CHP CC", "CHP"),
1295
            "p_nom_opt",
1296
        ] += row.p_nom_opt
1297
        neighbor_links.loc[
1298
            neighbor_links.Link == row.Link.replace("CHP CC", "CHP"), "p_nom"
1299
        ] += row.p_nom
1300
        neighbor_links.drop(index, inplace=True)
1301
1302
    # Combine heat pumps
1303
    # Like in Germany, there are air heat pumps in central heat grids
1304
    # and ground heat pumps in rural areas
1305
    rural_air = neighbor_links[neighbor_links.carrier == "rural air heat pump"]
1306
    for index, row in rural_air.iterrows():
1307
        neighbor_links.loc[
1308
            neighbor_links.Link == row.Link.replace("air", "ground"),
1309
            "p_nom_opt",
1310
        ] += row.p_nom_opt
1311
        neighbor_links.loc[
1312
            neighbor_links.Link == row.Link.replace("air", "ground"), "p_nom"
1313
        ] += row.p_nom
1314
        neighbor_links.drop(index, inplace=True)
1315
    links_to_etrago(
1316
        neighbor_links[neighbor_links.carrier.isin(extendable_links_carriers)],
1317
        "eGon100RE",
1318
    )
1319
    links_to_etrago(
1320
        neighbor_links[
1321
            ~neighbor_links.carrier.isin(extendable_links_carriers)
1322
        ],
1323
        "eGon100RE",
1324
        extendable=False,
1325
    )
1326
    # Include links time-series
1327
    # For heat_pumps
1328
    hp = neighbor_links[neighbor_links["carrier"].str.contains("heat pump")]
1329
1330
    neighbor_eff_t = network_prepared.links_t["efficiency"][
1331
        hp[hp.Link.isin(network_prepared.links_t["efficiency"].columns)].index
1332
    ]
1333
1334
    missing_hp = hp[~hp["Link"].isin(neighbor_eff_t.columns)].Link
1335
1336
    eff_timeseries = network_prepared.links_t["efficiency"].copy()
1337
    for met in missing_hp:  # met: missing efficiency timeseries
1338
        try:
1339
            neighbor_eff_t[met] = eff_timeseries.loc[:, met[0:-5]]
1340
        except:
1341
            print(f"There are not timeseries for heat_pump {met}")
1342
1343
    for i in neighbor_eff_t.columns:
1344
        new_index = neighbor_links[neighbor_links["Link"] == i].index
1345
        neighbor_eff_t.rename(columns={i: new_index[0]}, inplace=True)
1346
1347
    # Include links time-series
1348
    # For ev_chargers
1349
    ev = neighbor_links[neighbor_links["carrier"].str.contains("BEV charger")]
1350
1351
    ev_p_max_pu = network_prepared.links_t["p_max_pu"][
1352
        ev[ev.Link.isin(network_prepared.links_t["p_max_pu"].columns)].index
1353
    ]
1354
1355
    missing_ev = ev[~ev["Link"].isin(ev_p_max_pu.columns)].Link
1356
1357
    ev_p_max_pu_timeseries = network_prepared.links_t["p_max_pu"].copy()
1358
    for mct in missing_ev:  # evt: missing charger timeseries
1359
        try:
1360
            ev_p_max_pu[mct] = ev_p_max_pu_timeseries.loc[:, mct[0:-5]]
1361
        except:
1362
            print(f"There are not timeseries for EV charger {mct}")
1363
1364
    for i in ev_p_max_pu.columns:
1365
        new_index = neighbor_links[neighbor_links["Link"] == i].index
1366
        ev_p_max_pu.rename(columns={i: new_index[0]}, inplace=True)
1367
1368
    # prepare neighboring generators for etrago tables
1369
    neighbor_gens["scn_name"] = "eGon100RE"
1370
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
1371
    neighbor_gens["p_nom_extendable"] = False
1372
1373
    # Unify carrier names
1374
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")
1375
1376
    neighbor_gens.carrier.replace(
1377
        {
1378
            "onwind": "wind_onshore",
1379
            "ror": "run_of_river",
1380
            "offwind-ac": "wind_offshore",
1381
            "offwind-dc": "wind_offshore",
1382
            "offwind-float": "wind_offshore",
1383
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
1384
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
1385
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
1386
            "solar-hsat": "solar",
1387
        },
1388
        inplace=True,
1389
    )
1390
1391
    for i in [
1392
        "Generator",
1393
        "weight",
1394
        "lifetime",
1395
        "p_set",
1396
        "q_set",
1397
        "p_nom_opt",
1398
        "e_sum_min",
1399
        "e_sum_max",
1400
    ]:
1401
        neighbor_gens = neighbor_gens.drop(i, axis=1)
1402
1403
    neighbor_gens.to_sql(
1404
        "egon_etrago_generator",
1405
        engine,
1406
        schema="grid",
1407
        if_exists="append",
1408
        index=True,
1409
        index_label="generator_id",
1410
    )
1411
1412
    # prepare neighboring loads for etrago tables
1413
    neighbor_loads["scn_name"] = "eGon100RE"
1414
1415
    # Unify carrier names
1416
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")
1417
1418
    neighbor_loads.carrier.replace(
1419
        {
1420
            "electricity": "AC",
1421
            "DC": "AC",
1422
            "industry_electricity": "AC",
1423
            "H2_pipeline_retrofitted": "H2_system_boundary",
1424
            "gas_pipeline": "CH4_system_boundary",
1425
            "gas_for_industry": "CH4_for_industry",
1426
            "urban_central_heat": "central_heat",
1427
        },
1428
        inplace=True,
1429
    )
1430
1431
    neighbor_loads = neighbor_loads.drop(
1432
        columns=["Load"],
1433
        errors="ignore",
1434
    )
1435
1436
    neighbor_loads.to_sql(
1437
        "egon_etrago_load",
1438
        engine,
1439
        schema="grid",
1440
        if_exists="append",
1441
        index=True,
1442
        index_label="load_id",
1443
    )
1444
1445
    # prepare neighboring stores for etrago tables
1446
    neighbor_stores["scn_name"] = "eGon100RE"
1447
1448
    # Unify carrier names
1449
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")
1450
1451
    neighbor_stores.carrier.replace(
1452
        {
1453
            "Li_ion": "battery",
1454
            "gas": "CH4",
1455
            "urban_central_water_tanks": "central_heat_store",
1456
            "rural_water_tanks": "rural_heat_store",
1457
            "EV_battery": "battery_storage",
1458
        },
1459
        inplace=True,
1460
    )
1461
    neighbor_stores.loc[
1462
        (
1463
            (neighbor_stores.e_nom_max <= 1e9)
1464
            & (neighbor_stores.carrier == "H2_Store")
1465
        ),
1466
        "carrier",
1467
    ] = "H2_underground"
1468
    neighbor_stores.loc[
1469
        (
1470
            (neighbor_stores.e_nom_max > 1e9)
1471
            & (neighbor_stores.carrier == "H2_Store")
1472
        ),
1473
        "carrier",
1474
    ] = "H2_overground"
1475
1476
    for i in [
1477
        "Store",
1478
        "p_set",
1479
        "q_set",
1480
        "e_nom_opt",
1481
        "lifetime",
1482
        "e_initial_per_period",
1483
        "e_cyclic_per_period",
1484
        "location",
1485
    ]:
1486
        neighbor_stores = neighbor_stores.drop(i, axis=1, errors="ignore")
1487
1488
    for c in ["H2_underground", "H2_overground"]:
1489
        neighbor_stores.loc[
1490
            (neighbor_stores.carrier == c),
1491
            "lifetime",
1492
        ] = get_sector_parameters("gas", "eGon100RE")["lifetime"][c]
1493
1494
    neighbor_stores.to_sql(
1495
        "egon_etrago_store",
1496
        engine,
1497
        schema="grid",
1498
        if_exists="append",
1499
        index=True,
1500
        index_label="store_id",
1501
    )
1502
1503
    # prepare neighboring storage_units for etrago tables
1504
    neighbor_storage["scn_name"] = "eGon100RE"
1505
1506
    # Unify carrier names
1507
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")
1508
1509
    neighbor_storage.carrier.replace(
1510
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
1511
    )
1512
1513
    for i in [
1514
        "StorageUnit",
1515
        "p_nom_opt",
1516
        "state_of_charge_initial_per_period",
1517
        "cyclic_state_of_charge_per_period",
1518
    ]:
1519
        neighbor_storage = neighbor_storage.drop(i, axis=1, errors="ignore")
1520
1521
    neighbor_storage.to_sql(
1522
        "egon_etrago_storage",
1523
        engine,
1524
        schema="grid",
1525
        if_exists="append",
1526
        index=True,
1527
        index_label="storage_id",
1528
    )
1529
1530
    # writing neighboring loads_t p_sets to etrago tables
1531
1532
    neighbor_loads_t_etrago = pd.DataFrame(
1533
        columns=["scn_name", "temp_id", "p_set"],
1534
        index=neighbor_loads_t.columns,
1535
    )
1536
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
1537
    neighbor_loads_t_etrago["temp_id"] = 1
1538
    for i in neighbor_loads_t.columns:
1539
        neighbor_loads_t_etrago["p_set"][i] = neighbor_loads_t[
1540
            i
1541
        ].values.tolist()
1542
1543
    neighbor_loads_t_etrago.to_sql(
1544
        "egon_etrago_load_timeseries",
1545
        engine,
1546
        schema="grid",
1547
        if_exists="append",
1548
        index=True,
1549
        index_label="load_id",
1550
    )
1551
1552
    # writing neighboring link_t efficiency and p_max_pu to etrago tables
1553
    neighbor_link_t_etrago = pd.DataFrame(
1554
        columns=["scn_name", "temp_id", "p_max_pu", "efficiency"],
1555
        index=neighbor_eff_t.columns.to_list() + ev_p_max_pu.columns.to_list(),
1556
    )
1557
    neighbor_link_t_etrago["scn_name"] = "eGon100RE"
1558
    neighbor_link_t_etrago["temp_id"] = 1
1559
    for i in neighbor_eff_t.columns:
1560
        neighbor_link_t_etrago["efficiency"][i] = neighbor_eff_t[
1561
            i
1562
        ].values.tolist()
1563
    for i in ev_p_max_pu.columns:
1564
        neighbor_link_t_etrago["p_max_pu"][i] = ev_p_max_pu[i].values.tolist()
1565
1566
    neighbor_link_t_etrago.to_sql(
1567
        "egon_etrago_link_timeseries",
1568
        engine,
1569
        schema="grid",
1570
        if_exists="append",
1571
        index=True,
1572
        index_label="link_id",
1573
    )
1574
1575
    # writing neighboring generator_t p_max_pu to etrago tables
1576
    neighbor_gens_t_etrago = pd.DataFrame(
1577
        columns=["scn_name", "temp_id", "p_max_pu"],
1578
        index=neighbor_gens_t.columns,
1579
    )
1580
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
1581
    neighbor_gens_t_etrago["temp_id"] = 1
1582
    for i in neighbor_gens_t.columns:
1583
        neighbor_gens_t_etrago["p_max_pu"][i] = neighbor_gens_t[
1584
            i
1585
        ].values.tolist()
1586
1587
    neighbor_gens_t_etrago.to_sql(
1588
        "egon_etrago_generator_timeseries",
1589
        engine,
1590
        schema="grid",
1591
        if_exists="append",
1592
        index=True,
1593
        index_label="generator_id",
1594
    )
1595
1596
    # writing neighboring stores_t e_min_pu to etrago tables
1597
    neighbor_stores_t_etrago = pd.DataFrame(
1598
        columns=["scn_name", "temp_id", "e_min_pu"],
1599
        index=neighbor_stores_t.columns,
1600
    )
1601
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
1602
    neighbor_stores_t_etrago["temp_id"] = 1
1603
    for i in neighbor_stores_t.columns:
1604
        neighbor_stores_t_etrago["e_min_pu"][i] = neighbor_stores_t[
1605
            i
1606
        ].values.tolist()
1607
1608
    neighbor_stores_t_etrago.to_sql(
1609
        "egon_etrago_store_timeseries",
1610
        engine,
1611
        schema="grid",
1612
        if_exists="append",
1613
        index=True,
1614
        index_label="store_id",
1615
    )
1616
1617
    # writing neighboring storage_units inflow to etrago tables
1618
    neighbor_storage_t_etrago = pd.DataFrame(
1619
        columns=["scn_name", "temp_id", "inflow"],
1620
        index=neighbor_storage_t.columns,
1621
    )
1622
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
1623
    neighbor_storage_t_etrago["temp_id"] = 1
1624
    for i in neighbor_storage_t.columns:
1625
        neighbor_storage_t_etrago["inflow"][i] = neighbor_storage_t[
1626
            i
1627
        ].values.tolist()
1628
1629
    neighbor_storage_t_etrago.to_sql(
1630
        "egon_etrago_storage_timeseries",
1631
        engine,
1632
        schema="grid",
1633
        if_exists="append",
1634
        index=True,
1635
        index_label="storage_id",
1636
    )
1637
1638
    # writing neighboring lines_t s_max_pu to etrago tables
1639
    if not network_solved.lines_t["s_max_pu"].empty:
1640
        neighbor_lines_t_etrago = pd.DataFrame(
1641
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
1642
        )
1643
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"
1644
1645
        for i in neighbor_lines_t.columns:
1646
            neighbor_lines_t_etrago["s_max_pu"][i] = neighbor_lines_t[
1647
                i
1648
            ].values.tolist()
1649
1650
        neighbor_lines_t_etrago.to_sql(
1651
            "egon_etrago_line_timeseries",
1652
            engine,
1653
            schema="grid",
1654
            if_exists="append",
1655
            index=True,
1656
            index_label="line_id",
1657
        )
1658
1659
1660 View Code Duplication
def prepared_network(planning_horizon=3):
    """Load the pre-solve PyPSA network for the given planning horizon.

    Depending on the ``--run-pypsa-eur`` setting, the network is read
    either from the results of a local PyPSA-Eur run or from the
    pre-built network shipped in the egon-data bundle.

    Parameters
    ----------
    planning_horizon : int
        Index into the ``planning_horizons`` list of the PyPSA-Eur
        scenario configuration; only used when PyPSA-Eur was run.

    Returns
    -------
    pypsa.Network
        The pre-network for the requested planning horizon.

    """
    run_pypsa_eur = egon.data.config.settings()["egon-data"][
        "--run-pypsa-eur"
    ]

    if run_pypsa_eur:
        config_path = __path__[0] + "/datasets/pypsaeur/config_prepare.yaml"
        with open(config_path, "r") as stream:
            data_config = yaml.safe_load(stream)

        # The results file name encodes the scenario wildcards used by
        # the PyPSA-Eur snakemake workflow.
        scenario = data_config["scenario"]
        filename = (
            f"base_s_{scenario['clusters'][0]}"
            f"_l{scenario['ll'][0]}"
            f"_{scenario['opts'][0]}"
            f"_{scenario['sector_opts'][0]}"
            f"_{scenario['planning_horizons'][planning_horizon]}.nc"
        )

        target_file = (
            Path(".")
            / "run-pypsa-eur"
            / "pypsa-eur"
            / "results"
            / data_config["run"]["name"]
            / "prenetworks"
            / filename
        )
    else:
        target_file = (
            Path(".")
            / "data_bundle_egon_data"
            / "pypsa_eur"
            / "prenetworks"
            / "prenetwork_post-manipulate_pre-solve"
            / "base_s_39_lc1.25__cb40ex0-T-H-I-B-solar+p3-dist1_2045.nc"
        )

    return pypsa.Network(target_file.absolute().as_posix())
1692
1693
1694
def overwrite_H2_pipeline_share():
    """Overwrite retrofitted_CH4pipeline-to-H2pipeline_share value

    Overwrite retrofitted_CH4pipeline-to-H2pipeline_share in the
    scenario parameter table if p-e-s is run.
    This function writes to the database and has no return.

    """
    scn_name = "eGon100RE"
    # Select target from dataset configuration
    target = egon.data.config.datasets()["pypsa-eur-sec"]["target"]

    n = read_network()

    H2_pipelines = n.links[n.links["carrier"] == "H2 pipeline retrofitted"]
    CH4_pipelines = n.links[n.links["carrier"] == "gas pipeline"]

    # Mean ratio of retrofitted H2 capacity to the capacity of the
    # corresponding CH4 pipeline. The previous element-wise division via
    # zip() raised ZeroDivisionError for zero-capacity CH4 pipelines and
    # silently truncated mismatched lists; guard both cases explicitly.
    h2_p_nom_opt = np.asarray(H2_pipelines.p_nom_opt.to_list(), dtype=float)
    ch4_p_nom = np.asarray(CH4_pipelines.p_nom.to_list(), dtype=float)

    paired = min(len(h2_p_nom_opt), len(ch4_p_nom))
    h2_p_nom_opt = h2_p_nom_opt[:paired]
    ch4_p_nom = ch4_p_nom[:paired]

    # Only pipelines with non-zero CH4 capacity contribute to the share.
    nonzero = ch4_p_nom != 0
    H2_pipes_share = np.mean(h2_p_nom_opt[nonzero] / ch4_p_nom[nonzero])

    logger.info(
        "retrofitted_CH4pipeline-to-H2pipeline_share = " + str(H2_pipes_share)
    )

    parameters = db.select_dataframe(
        f"""
        SELECT *
        FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        WHERE name = '{scn_name}'
        """
    )

    gas_param = parameters.loc[0, "gas_parameters"]
    gas_param["retrofitted_CH4pipeline-to-H2pipeline_share"] = H2_pipes_share
    gas_param = json.dumps(gas_param)

    # Update data in db
    db.execute_sql(
        f"""
    UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
    SET gas_parameters = '{gas_param}'
    WHERE name = '{scn_name}';
    """
    )
1742
1743
1744
def update_electrical_timeseries_germany(network):
    """Replace electrical demand time series in Germany with data from egon-data

    For years before 2037 the annual demands are linearly interpolated
    between status2019 values and NEP 2023 scenario B 2037 values; the
    shape of the curve is always taken from the eGon100RE time series.
    For 2045 the eGon100RE time series is used directly.

    Parameters
    ----------
    network : pypsa.Network
        Network including demand time series from pypsa-eur.
        ``network.year`` must be set to the planning year.

    Returns
    -------
    network : pypsa.Network
        Network including electrical demand time series in Germany from
        egon-data. For years between 2037 and 2045 (and beyond 2045) no
        scaling is implemented and the network is returned unchanged.

    """
    year = network.year
    # Temporal resolution of the network (e.g. 3 for 3-hourly snapshots),
    # used to subsample the hourly input time series.
    skip = network.snapshot_weightings.objective.iloc[0].astype("int")
    df = pd.read_csv(
        "input-pypsa-eur-sec/electrical_demand_timeseries_DE_eGon100RE.csv"
    )

    annual_demand = pd.Series(index=[2019, 2037], dtype=float)
    annual_demand_industry = pd.Series(index=[2019, 2037], dtype=float)
    # Define values from status2019 for interpolation
    # Residential and service (in TWh)
    annual_demand.loc[2019] = 124.71 + 143.26
    # Industry (in TWh)
    annual_demand_industry.loc[2019] = 241.925

    # Define values from NEP 2023 scenario B 2037 for interpolation
    # Residential and service (in TWh)
    annual_demand.loc[2037] = 104 + 153.1
    # Industry (in TWh)
    annual_demand_industry.loc[2037] = 334.0

    # Set interpolated demands for years between 2019 and 2037
    if year < 2037:
        # Calculate annual demands for year by linearly interpolating
        # between 2019 and 2037.
        # Done separately for industry and residential/service to fit
        # pypsa-eur's load structure.
        annual_rate = (annual_demand.loc[2037] - annual_demand.loc[2019]) / (
            2037 - 2019
        )
        annual_demand_year = annual_demand.loc[2019] + annual_rate * (
            year - 2019
        )

        annual_rate_industry = (
            annual_demand_industry.loc[2037] - annual_demand_industry.loc[2019]
        ) / (2037 - 2019)
        annual_demand_year_industry = annual_demand_industry.loc[
            2019
        ] + annual_rate_industry * (year - 2019)

        # Scale time series for 100% scenario with the annual demands.
        # The shape of the curve is taken from the 100% scenario since the
        # same weather and calendar year is used there.
        # (TWh -> MWh via the 1e6 factor)
        network.loads_t.p_set.loc[:, "DE0 0"] = (
            df["residential_and_service"].loc[::skip]
            / df["residential_and_service"].sum()
            * annual_demand_year
            * 1e6
        ).values

        network.loads_t.p_set.loc[:, "DE0 0 industry electricity"] = (
            df["industry"].loc[::skip]
            / df["industry"].sum()
            * annual_demand_year_industry
            * 1e6
        ).values

    elif year == 2045:
        # Use the eGon100RE time series as-is.
        # Fixed here: ``.values`` is required — df has a RangeIndex while
        # p_set is indexed by snapshots, so assigning the raw Series would
        # align on index and fill the column with NaN.
        network.loads_t.p_set.loc[:, "DE0 0"] = (
            df["residential_and_service"].loc[::skip].values
        )

        network.loads_t.p_set.loc[:, "DE0 0 industry electricity"] = (
            df["industry"].loc[::skip].values
        )

    else:
        print(
            "Scaling not implemented for years between 2037 and 2045 and beyond."
        )
        # Return the unmodified network instead of None so that chained
        # manipulator calls (network = f(network)) keep working.
        return network

    # Static industry demand is replaced by the time series above
    network.loads.loc["DE0 0 industry electricity", "p_set"] = 0.0

    return network
1833
1834
1835
def geothermal_district_heating(network):
    """Add the option to build geothermal power plants in district heating in Germany

    Parameters
    ----------
    network : pypsa.Network
        Network from pypsa-eur without geothermal generators

    Returns
    -------
    network : pypsa.Network
        Updated network with geothermal generators

    """
    # Lifetime of a geothermal plant set to 30 years based on:
    # Ableitung eines Korridors für den Ausbau der erneuerbaren Wärme im
    # Gebäudebereich, Beuth Hochschule für Technik, Berlin / ifeu – Institut
    # für Energie- und Umweltforschung Heidelberg GmbH, Februar 2017
    lifetime_geothermal = 30

    costs_and_potentials = pd.read_csv(
        "input-pypsa-eur-sec/geothermal_potential_germany.csv"
    )

    network.add("Carrier", "urban central geo thermal")

    # One extendable generator per potential class, attached to the German
    # urban central heat bus
    for idx, record in costs_and_potentials.iterrows():
        # NOTE(review): the cost column is labelled EUR/kW but is scaled by
        # 1e6 here — confirm the intended unit conversion.
        annualized_cost = annualize_capital_costs(
            record["cost [EUR/kW]"] * 1e6, lifetime_geothermal, 0.07
        )

        network.add(
            "Generator",
            f"DE0 0 urban central geo thermal {idx}",
            bus="DE0 0 urban central heat",
            carrier="urban central geo thermal",
            p_nom_extendable=True,
            p_nom_max=record["potential [MW]"],
            capital_cost=annualized_cost,
        )

    return network
1875
1876
1877
def h2_overground_stores(network):
    """Add hydrogen overground stores to each hydrogen node

    In pypsa-eur, only countries without the potential of underground
    hydrogen stores get the option to build overground hydrogen tanks.
    Overground stores are more expensive, but are not restricted by the
    geological potential. To allow higher hydrogen store capacities in each
    country, optional hydrogen overground tanks are also added to nodes
    with a potential for underground stores.

    Parameters
    ----------
    network : pypsa.Network
        Network without hydrogen overground stores at each hydrogen node

    Returns
    -------
    network : pypsa.Network
        Network with hydrogen overground stores at each hydrogen node

    """
    h2_stores = network.stores[network.stores.carrier == "H2 Store"]

    # Underground stores are capped by a finite geological potential;
    # overground tanks are unrestricted (e_nom_max == inf)
    underground = h2_stores[h2_stores.e_nom_max != np.inf]
    overground = h2_stores[h2_stores.e_nom_max == np.inf]

    # Add an extendable overground tank next to every underground store,
    # priced at the mean capital cost of the existing overground tanks
    network.madd(
        "Store",
        underground.bus + " overground Store",
        bus=underground.bus.values,
        e_nom_extendable=True,
        e_cyclic=True,
        carrier="H2 Store",
        capital_cost=overground.capital_cost.mean(),
    )

    return network
1920
1921
1922
def update_heat_timeseries_germany(network):
    """Replace heat demand time series in Germany with data from eGon-data.

    Parameters
    ----------
    network : pypsa.Network
        Network including heat demand time series from pypsa-eur

    Returns
    -------
    network : pypsa.Network
        Network with rural and urban central heat demand in Germany taken
        from eGon-data

    """
    # Fixed here: a dead no-op statement (`network.loads`) was removed.

    # Import heat demand curves for Germany from eGon-data
    df_egon_heat_demand = pd.read_csv(
        "input-pypsa-eur-sec/heat_demand_timeseries_DE_eGon100RE.csv"
    )

    # Replace heat demand curves in Germany with values from eGon-data.
    # NOTE(review): bus names use the "DE1 0" prefix here while other
    # functions in this module address "DE0 0" — confirm which naming the
    # network at this stage actually uses.
    network.loads_t.p_set.loc[:, "DE1 0 rural heat"] = (
        df_egon_heat_demand.loc[:, "residential rural"].values
        + df_egon_heat_demand.loc[:, "service rural"].values
    )

    network.loads_t.p_set.loc[:, "DE1 0 urban central heat"] = (
        df_egon_heat_demand.loc[:, "urban central"].values
    )

    return network
1940
1941
1942
def drop_biomass(network):
    """Remove every component whose name contains 'biomass'.

    Parameters
    ----------
    network : pypsa.Network
        Network possibly containing biomass components

    Returns
    -------
    network : pypsa.Network
        Network without biomass components

    """
    pattern = "biomass"

    for component in network.iterate_components():
        matches = component.df.index.str.contains(pattern)
        network.mremove(component.name, component.df.index[matches])

    return network
1948
1949
1950
def postprocessing_biomass_2045():
    """Strip biomass components from the solved network and re-export it.

    Reads the solver run configuration to rebuild the postnetwork file name
    for the last planning horizon, removes all biomass components and
    overwrites the file in place.
    """
    network = drop_biomass(read_network())

    # Read the solver configuration to reconstruct the result file name
    config_path = __path__[0] + "/datasets/pypsaeur/config_solve.yaml"
    with open(config_path, "r") as stream:
        data_config = yaml.safe_load(stream)

    scenario = data_config["scenario"]
    file_name = (
        f"base_s_{scenario['clusters'][0]}"
        f"_l{scenario['ll'][0]}"
        f"_{scenario['opts'][0]}"
        f"_{scenario['sector_opts'][0]}"
        f"_{scenario['planning_horizons'][3]}.nc"
    )

    target_file = (
        Path(".")
        / "run-pypsa-eur"
        / "pypsa-eur"
        / "results"
        / data_config["run"]["name"]
        / "postnetworks"
        / file_name
    )

    network.export_to_netcdf(target_file)
1975
1976
1977
def drop_urban_decentral_heat(network):
    """Merge urban decentral heat into rural heat and drop its components.

    Urban decentral heat demand is added to the rural heat demand of the
    same node, low-temperature industry loads attached to urban decentral
    buses are re-wired to the rural heat bus, and afterwards every
    component whose name contains "urban decentral" is removed.

    Parameters
    ----------
    network : pypsa.Network
        Network with separate urban decentral heat components

    Returns
    -------
    network : pypsa.Network
        Network without urban decentral heat components

    """
    carrier = "urban decentral heat"

    # Add urban decentral heat demand to rural heat demand.
    # NOTE(review): `country` is the 5-character node prefix of the bus
    # name (e.g. "DE0 0"), not a country code; if several loads share a
    # prefix this loop would add the same series more than once — TODO
    # confirm uniqueness of prefixes among these loads.
    for country in network.loads.loc[
        network.loads.carrier == carrier, "bus"
    ].str[:5]:

        if f"{country} {carrier}" in network.loads_t.p_set.columns:
            network.loads_t.p_set[
                f"{country} rural heat"
            ] += network.loads_t.p_set[f"{country} {carrier}"]
        else:
            print(
                f"""No time series available for {country} {carrier}.
                  Using static p_set."""
            )

            # Fall back to the static p_set value; the scalar is added to
            # every snapshot of the rural heat series
            network.loads_t.p_set[
                f"{country} rural heat"
            ] += network.loads.loc[f"{country} {carrier}", "p_set"]

    # In some cases low-temperature heat for industry is connected to the urban
    # decentral heat bus since there is no urban central heat bus.
    # These loads are connected to the representative rural heat bus:
    network.loads.loc[
        (network.loads.bus.str.contains(carrier))
        & (~network.loads.carrier.str.contains(carrier.replace(" heat", ""))),
        "bus",
    ] = network.loads.loc[
        (network.loads.bus.str.contains(carrier))
        & (~network.loads.carrier.str.contains(carrier.replace(" heat", ""))),
        "bus",
    ].str.replace(
        "urban decentral", "rural"
    )

    # Drop components attached to urban decentral heat
    for c in network.iterate_components():
        network.mremove(
            c.name, c.df[c.df.index.str.contains("urban decentral")].index
        )

    return network
2021
2022
2023
def district_heating_shares(network):
    """Split per-node heat demand into urban central and rural shares.

    Country-specific district heating shares from eGon-data are applied to
    the summed heat demand of every node: the district heating share goes
    to the urban central heat load, the remainder to the rural heat load.

    Parameters
    ----------
    network : pypsa.Network
        Network with aggregated heat demand time series

    Returns
    -------
    network : pypsa.Network
        Network with heat demand split into urban central and rural loads

    """
    # District heating share per country (index: 2-letter country code)
    df = pd.read_csv(
        "data_bundle_powerd_data/district_heating_shares_egon.csv"
    ).set_index("country_code")

    # Total heat demand per node prefix (first 5 characters of the bus
    # name, e.g. "DE0 0"), summed over all heat loads with a time series.
    # NOTE(review): groupby(..., axis=1) is deprecated in pandas >= 2.1;
    # a transpose-based groupby will be needed on newer pandas.
    heat_demand_per_country = (
        network.loads_t.p_set[
            network.loads[
                (network.loads.carrier.str.contains("heat"))
                & network.loads.index.isin(network.loads_t.p_set.columns)
            ].index
        ]
        .groupby(network.loads.bus.str[:5], axis=1)
        .sum()
    )

    # Apply the country share to each node; country[:2] maps the node
    # prefix back to the 2-letter country code of the share table
    for country in heat_demand_per_country.columns:
        network.loads_t.p_set[f"{country} urban central heat"] = (
            heat_demand_per_country.loc[:, country].mul(
                df.loc[country[:2]].values[0]
            )
        )
        network.loads_t.p_set[f"{country} rural heat"] = (
            heat_demand_per_country.loc[:, country].mul(
                (1 - df.loc[country[:2]].values[0])
            )
        )

    # Drop links with undefined buses or carrier
    network.mremove(
        "Link",
        network.links[
            ~network.links.bus0.isin(network.buses.index.values)
        ].index,
    )
    network.mremove(
        "Link",
        network.links[network.links.carrier == ""].index,
    )

    return network
2064
2065
2066
def drop_new_gas_pipelines(network):
    """Remove the option to build new gas pipelines.

    Parameters
    ----------
    network : pypsa.Network
        Network possibly containing new gas pipeline links

    Returns
    -------
    network : pypsa.Network
        Network without new gas pipeline links

    """
    new_pipelines = network.links.index.str.contains("gas pipeline new")
    network.mremove("Link", network.links.index[new_pipelines])

    return network
2075
2076
2077
def drop_fossil_gas(network):
    """Remove fossil gas generators from the network.

    Parameters
    ----------
    network : pypsa.Network
        Network possibly containing fossil gas generators

    Returns
    -------
    network : pypsa.Network
        Network without fossil gas generators

    """
    fossil_gas = network.generators.carrier == "gas"
    network.mremove("Generator", network.generators.index[fossil_gas])

    return network
2084
2085
2086
def drop_conventional_power_plants(network):
    """Drop lignite and coal power plants in Germany.

    Parameters
    ----------
    network : pypsa.Network
        Network possibly containing German coal and lignite links

    Returns
    -------
    network : pypsa.Network
        Network without German coal and lignite links

    """
    # Coal/lignite conversion links feeding a German electricity bus
    conventional_de = network.links.carrier.isin(
        ["coal", "lignite"]
    ) & network.links.bus1.str.startswith("DE")

    network.mremove("Link", network.links.index[conventional_de])

    return network
2098
2099
2100
def rual_heat_technologies(network):
    """Drop rural gas boilers and rural solar thermal collectors.

    NOTE(review): the function name is misspelled ("rual") but is
    referenced under this name elsewhere in the module, so it is kept for
    compatibility.

    Parameters
    ----------
    network : pypsa.Network
        Network possibly containing rural heat technologies

    Returns
    -------
    network : pypsa.Network
        Network without rural gas boilers and rural solar thermal

    """
    rural_gas_boilers = network.links.index.str.contains("rural gas boiler")
    network.mremove("Link", network.links.index[rural_gas_boilers])

    rural_solar_thermal = network.generators.carrier.str.contains(
        "rural solar thermal"
    )
    network.mremove(
        "Generator", network.generators.index[rural_solar_thermal]
    )

    return network
2116
2117
2118
def coal_exit_D():
    """Force German coal and lignite power plants to retire by 2034.

    Reads the pypsa-eur power plant list, caps the ``DateOut`` of German
    lignite and hard coal plants at 2034 and writes the file back in place.
    """
    path = "run-pypsa-eur/pypsa-eur/resources/powerplants_s_39.csv"

    df = pd.read_csv(path, index_col=0)

    # .copy() fixes chained-indexing here: the boolean selection returns a
    # view-like slice, and mutating it directly triggers pandas'
    # SettingWithCopyWarning and may silently not propagate under
    # copy-on-write semantics.
    df_de_coal = df[
        (df.Country == "DE")
        & ((df.Fueltype == "Lignite") | (df.Fueltype == "Hard Coal"))
    ].copy()
    # Plants scheduled to retire in 2035 or later are shut down in 2034
    df_de_coal.loc[df_de_coal.DateOut.values >= 2035, "DateOut"] = 2034
    df.loc[df_de_coal.index] = df_de_coal

    df.to_csv(path)
2131
2132
2133
def offwind_potential_D(network, capacity_per_sqkm=4):
    """Raise the offshore wind potential for Germany.

    Sets p_nom_max for German offshore generators with respect to
    ``capacity_per_sqkm`` = 4 instead of the default 2 which is applied
    for the rest of Europe.

    Parameters
    ----------
    network : pypsa.Network
        Network with default German offshore potentials
    capacity_per_sqkm : int, optional
        Assumed installable capacity density. The default is 4.

    Returns
    -------
    network : pypsa.Network
        Network with updated German offshore potentials

    """
    # Carrier-specific base factors (the original hard-coded values)
    base_factors = {
        "offwind-ac": 1942,
        "offwind-dc": 10768,
        "offwind-float": 134,
    }

    for offshore_carrier, factor in base_factors.items():
        german_offwind = (network.generators.bus == "DE0 0") & (
            network.generators.carrier == offshore_carrier
        )
        network.generators.loc[german_offwind, "p_nom_max"] = (
            factor * capacity_per_sqkm
        )

    return network
2163
2164
2165
def additional_grid_expansion_2045(network):
    """Relax the grid expansion cost limit ('lc_limit') by 5 %.

    Parameters
    ----------
    network : pypsa.Network
        Network with the original lc_limit global constraint

    Returns
    -------
    network : pypsa.Network
        Network with the lc_limit constant increased by 5 %

    """
    expansion_headroom = 1.05
    network.global_constraints.loc["lc_limit", "constant"] *= (
        expansion_headroom
    )

    return network
2170
2171
2172
def execute():
    """Adjust the pypsa-eur prenetworks before they are solved.

    Depending on the foresight mode in the pypsa-eur configuration, the
    prenetwork files are loaded, manipulated by scenario-specific functions
    (demand updates, geothermal district heating, hydrogen stores, ...) and
    written back in place. Does nothing if the egon-data settings disable
    running pypsa-eur.
    """
    if egon.data.config.settings()["egon-data"]["--run-pypsa-eur"]:
        with open(
            __path__[0] + "/datasets/pypsaeur/config.yaml", "r"
        ) as stream:
            data_config = yaml.safe_load(stream)

        if data_config["foresight"] == "myopic":

            print("Adjusting scenarios on the myopic pathway...")

            coal_exit_D()

            # Build the file name of each planning-horizon prenetwork
            networks = pd.Series()

            for i in range(
                0, len(data_config["scenario"]["planning_horizons"])
            ):
                nc_file = pd.Series(
                    f"base_s_{data_config['scenario']['clusters'][0]}"
                    f"_l{data_config['scenario']['ll'][0]}"
                    f"_{data_config['scenario']['opts'][0]}"
                    f"_{data_config['scenario']['sector_opts'][0]}"
                    f"_{data_config['scenario']['planning_horizons'][i]}.nc"
                )
                networks = networks._append(nc_file)

            scn_path = pd.DataFrame(
                index=["2025", "2030", "2035", "2045"],
                columns=["prenetwork", "functions"],
            )

            for year in scn_path.index:
                scn_path.at[year, "prenetwork"] = networks[
                    networks.str.contains(year)
                ].values

            # Manipulation functions applied to the intermediate years
            for year in ["2025", "2030", "2035"]:
                scn_path.loc[year, "functions"] = [
                    # drop_urban_decentral_heat,
                    update_electrical_timeseries_germany,
                    geothermal_district_heating,
                    h2_overground_stores,
                    drop_new_gas_pipelines,
                    offwind_potential_D,
                ]

            # The target year additionally drops biomass and fossil gas and
            # allows some additional grid expansion
            scn_path.loc["2045", "functions"] = [
                drop_biomass,
                # drop_urban_decentral_heat,
                update_electrical_timeseries_germany,
                geothermal_district_heating,
                h2_overground_stores,
                drop_new_gas_pipelines,
                drop_fossil_gas,
                offwind_potential_D,
                additional_grid_expansion_2045,
                # drop_conventional_power_plants,
                # rual_heat_technologies, #To be defined
            ]

            network_path = (
                Path(".")
                / "run-pypsa-eur"
                / "pypsa-eur"
                / "results"
                / data_config["run"]["name"]
                / "prenetworks"
            )

            # Apply the manipulators to each prenetwork and overwrite the
            # file in place
            for scn in scn_path.index:
                path = network_path / scn_path.at[scn, "prenetwork"]
                network = pypsa.Network(path)
                network.year = int(scn)
                for manipulator in scn_path.at[scn, "functions"]:
                    network = manipulator(network)
                network.export_to_netcdf(path)

        elif (data_config["foresight"] == "overnight") & (
            int(data_config["scenario"]["planning_horizons"][0]) > 2040
        ):

            print("Adjusting overnight long-term scenario...")

            # NOTE(review): this branch uses an "elec_s_" file name prefix
            # while the myopic branch uses "base_s_" — confirm which one
            # the pypsa-eur version in use actually produces.
            network_path = (
                Path(".")
                / "run-pypsa-eur"
                / "pypsa-eur"
                / "results"
                / data_config["run"]["name"]
                / "prenetworks"
                / f"elec_s_{data_config['scenario']['clusters'][0]}"
                f"_l{data_config['scenario']['ll'][0]}"
                f"_{data_config['scenario']['opts'][0]}"
                f"_{data_config['scenario']['sector_opts'][0]}"
                f"_{data_config['scenario']['planning_horizons'][0]}.nc"
            )

            network = pypsa.Network(network_path)

            network = drop_biomass(network)

            network = drop_urban_decentral_heat(network)

            network = district_heating_shares(network)

            network = update_heat_timeseries_germany(network)

            network = update_electrical_timeseries_germany(network)

            network = geothermal_district_heating(network)

            network = h2_overground_stores(network)

            network = drop_new_gas_pipelines(network)

            network = drop_fossil_gas(network)

            network = rual_heat_technologies(network)

            network.export_to_netcdf(network_path)

        else:
            # Fixed here: the original message contained a broken f-string
            # ("year int(data_config[...]") and never interpolated the year.
            print(
                f"""Adjustments on prenetworks are not implemented for
                foresight option {data_config['foresight']} and
                year {int(data_config['scenario']['planning_horizons'][0])}.
                Please check the pypsaeur.execute function.
                """
            )
    else:
        print("Pypsa-eur is not executed due to the settings of egon-data")
2304