"""The central module containing all code dealing with importing data from
the pypsa-eur-sec scenario parameter creation
"""

from pathlib import Path
from urllib.request import urlretrieve
import json
import shutil

from shapely.geometry import LineString
import geopandas as gpd
import importlib_resources as resources
import numpy as np
import pandas as pd
import pypsa
import requests
import yaml

from egon.data import __path__, config, db, logger
from egon.data.datasets import Dataset
from egon.data.datasets.scenario_parameters import get_sector_parameters
from egon.data.datasets.scenario_parameters.parameters import (
    annualize_capital_costs,
)
import egon.data.config
import egon.data.subprocess as subproc


class PreparePypsaEur(Dataset):
    def __init__(self, dependencies):
        super().__init__(
            name="PreparePypsaEur",
            version="0.0.42",
            dependencies=dependencies,
            tasks=(
                download,
                prepare_network,
            ),
        )


class RunPypsaEur(Dataset):
    def __init__(self, dependencies):
        super().__init__(
            name="SolvePypsaEur",
            version="0.0.42",
            dependencies=dependencies,
            tasks=(
                prepare_network_2,
                execute,
                solve_network,
                clean_database,
                electrical_neighbours_egon100,
                h2_neighbours_egon2035,
                # Dropped until we decide how to deal with the H2 grid
                # overwrite_H2_pipeline_share,
            ),
        )


def countries_list():
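    """Return the two-letter codes of all countries considered here.

    Returns
    -------
    list of str
        Country codes of Germany and the neighbouring countries that are
        kept in the cross-border analysis.
    """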
    return [
        "DE",
        "AT",
        "CH",
        "CZ",
        "PL",
        "SE",
        "NO",
        "DK",
        "GB",
        "NL",
        "BE",
        "FR",
        "LU",
    ]


def h2_neighbours_egon2035():
    """
    Load the solved pypsa-eur network for eGon2035, process its H2
    buses and insert them into the grid.egon_etrago_bus table.

    Returns
    -------
    None.

    """
    if "eGon2035" in config.settings()["egon-data"]["--scenarios"]:
        # Delete buses from previous executions
        db.execute_sql(
            """
            DELETE FROM grid.egon_etrago_bus WHERE carrier = 'H2'
            AND scn_name = 'eGon2035'
            AND country <> 'DE'
            """
        )

        # Load calculated network for eGon2035
        n = read_network(planning_horizon=2035)

        # Filter only H2 buses in selected foreign countries
        h2_bus = n.buses[(n.buses.country != "DE") & (n.buses.carrier == "H2")]
        wanted_countries = countries_list()
        h2_bus = h2_bus[
            (h2_bus.country.isin(wanted_countries))
            & (~h2_bus.index.str.contains("FR6"))
        ]

        # Add geometry column
        h2_bus = (
            gpd.GeoDataFrame(
                h2_bus, geometry=gpd.points_from_xy(h2_bus.x, h2_bus.y)
            )
            .rename_geometry("geom")
            .set_crs(4326)
        )

        # Adjust dataframe to the database table format
        h2_bus["scn_name"] = "eGon2035"

        bus_id = db.next_etrago_id("bus")  # will be changed in PR1287
        ### Delete when PR1287 is merged ###
        bus_id = range(bus_id, bus_id + len(h2_bus.index))
        ####################################
        h2_bus["bus_id"] = bus_id

        h2_bus.drop(
            columns=[
                "unit",
                "control",
                "generator",
                "location",
                "substation_off",
                "substation_lv",
                "sub_network",
            ],
            inplace=True,
        )

        # Connect to local database and write results
        engine = db.engine()

        h2_bus.to_postgis(
            "egon_etrago_bus",
            engine,
            schema="grid",
            if_exists="append",
            index=False,
        )
    else:
        print("eGon2035 is not in the list of scenarios")


def download():
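    """Clone pypsa-eur, check out the pinned commit and fetch input data.

    If the --run-pypsa-eur option is set, the pypsa-eur repository is
    cloned and pinned, the egon-data specific configuration and
    custom_extra_functionality.py are copied into it, the era5 weather
    cutout is copied in, and input files that cannot be fetched by the
    regular snakemake workflow (natura, ship density, ENSPRESO biomass
    and Global Energy Monitor files) are downloaded.

    Returns
    -------
    None.

    """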
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"
    if config.settings()["egon-data"]["--run-pypsa-eur"]:
        if not pypsa_eur_repos.exists():
            subproc.run(
                [
                    "git",
                    "clone",
                    "--branch",
                    "master",
                    "https://github.com/PyPSA/pypsa-eur.git",
                    pypsa_eur_repos,
                ]
            )

            subproc.run(
                [
                    "git",
                    "checkout",
                    "2119f4cee05c256509f48d4e9fe0d8fd9e9e3632",
                ],
                cwd=pypsa_eur_repos,
            )

            # Add gurobi solver to environment:
            # Read YAML file
            # path_to_env = pypsa_eur_repos / "envs" / "environment.yaml"
            # with open(path_to_env, "r") as stream:
            #    env = yaml.safe_load(stream)

            # The version of gurobipy has to fit to the version of gurobi.
            # Since we mainly use gurobi 10.0 this is set here.
            # env["dependencies"][-1]["pip"].append("gurobipy==10.0.0")

            # Set python version to <3.12
            # Python<=3.12 needs gurobipy>=11.0, in case gurobipy is updated,
            # this can be removed
            # env["dependencies"] = [
            #    "python>=3.8,<3.12" if x == "python>=3.8" else x
            #    for x in env["dependencies"]
            # ]

            # Limit geopandas version
            # our pypsa-eur version is not compatible to geopandas>1
            # env["dependencies"] = [
            #    "geopandas>=0.11.0,<1" if x == "geopandas>=0.11.0" else x
            #    for x in env["dependencies"]
            # ]

            # Write YAML file
            # with open(path_to_env, "w", encoding="utf8") as outfile:
            #    yaml.dump(
            #        env, outfile, default_flow_style=False, allow_unicode=True
            #    )

            # Copy config file for egon-data to pypsa-eur directory
            shutil.copy(
                Path(
                    __path__[0], "datasets", "pypsaeur", "config_prepare.yaml"
                ),
                pypsa_eur_repos / "config" / "config.yaml",
            )

            # Copy custom_extra_functionality.py for egon-data
            # to the pypsa-eur directory
            shutil.copy(
                Path(
                    __path__[0],
                    "datasets",
                    "pypsaeur",
                    "custom_extra_functionality.py",
                ),
                pypsa_eur_repos / "data",
            )

            with open(filepath / "Snakefile", "w") as snakefile:
                snakefile.write(
                    resources.read_text(
                        "egon.data.datasets.pypsaeur", "Snakefile"
                    )
                )

        # Copy era5 weather data to folder for pypsaeur
        era5_pypsaeur_path = filepath / "pypsa-eur" / "cutouts"

        if not era5_pypsaeur_path.exists():
            era5_pypsaeur_path.mkdir(parents=True, exist_ok=True)
            copy_from = config.datasets()["era5_weather_data"]["targets"][
                "weather_data"
            ]["path"]
            filename = "europe-2011-era5.nc"
            shutil.copy(
                copy_from + "/" + filename, era5_pypsaeur_path / filename
            )

        # Workaround to download natura, shipdensity and globalenergymonitor
        # data, which is not working in the regular snakemake workflow.
        # The same files are downloaded from the same directory as in
        # pypsa-eur version 0.10 here. They are stored in the folders
        # used by pypsa-eur.
        if not (filepath / "pypsa-eur" / "resources").exists():
            (filepath / "pypsa-eur" / "resources").mkdir(
                parents=True, exist_ok=True
            )
        urlretrieve(
            "https://zenodo.org/record/4706686/files/natura.tiff",
            filepath / "pypsa-eur" / "resources" / "natura.tiff",
        )

        if not (filepath / "pypsa-eur" / "data").exists():
            (filepath / "pypsa-eur" / "data").mkdir(
                parents=True, exist_ok=True
            )
        urlretrieve(
            "https://zenodo.org/record/13757228/files/shipdensity_global.zip",
            filepath / "pypsa-eur" / "data" / "shipdensity_global.zip",
        )

        if not (
            filepath
            / "pypsa-eur"
            / "zenodo.org"
            / "records"
            / "13757228"
            / "files"
        ).exists():
            (
                filepath
                / "pypsa-eur"
                / "zenodo.org"
                / "records"
                / "13757228"
                / "files"
            ).mkdir(parents=True, exist_ok=True)

        urlretrieve(
            "https://zenodo.org/records/10356004/files/ENSPRESO_BIOMASS.xlsx",
            filepath
            / "pypsa-eur"
            / "zenodo.org"
            / "records"
            / "13757228"
            / "files"
            / "ENSPRESO_BIOMASS.xlsx",
        )

        if not (filepath / "pypsa-eur" / "data" / "gem").exists():
            (filepath / "pypsa-eur" / "data" / "gem").mkdir(
                parents=True, exist_ok=True
            )

        r = requests.get(
            "https://tubcloud.tu-berlin.de/s/LMBJQCsN6Ez5cN2/download/"
            "Europe-Gas-Tracker-2024-05.xlsx"
        )
        with open(
            filepath
            / "pypsa-eur"
            / "data"
            / "gem"
            / "Europe-Gas-Tracker-2024-05.xlsx",
            "wb",
        ) as outfile:
            outfile.write(r.content)

        if not (filepath / "pypsa-eur" / "data" / "gem").exists():
            (filepath / "pypsa-eur" / "data" / "gem").mkdir(
                parents=True, exist_ok=True
            )

        r = requests.get(
            "https://tubcloud.tu-berlin.de/s/Aqebo3rrQZWKGsG/download/"
            "Global-Steel-Plant-Tracker-April-2024-Standard-Copy-V1.xlsx"
        )
        with open(
            filepath
            / "pypsa-eur"
            / "data"
            / "gem"
            / "Global-Steel-Plant-Tracker-April-2024-Standard-Copy-V1.xlsx",
            "wb",
        ) as outfile:
            outfile.write(r.content)

    else:
        print("Pypsa-eur is not executed due to the settings of egon-data")


def prepare_network():
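    """Run the 'prepare' rule of the pypsa-eur snakemake workflow.

    If the --run-pypsa-eur option is set, snakemake is called to prepare
    the networks and the resulting prenetwork files are copied to a
    separate folder before they are manipulated and solved.

    Returns
    -------
    None.

    """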
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur"

    if config.settings()["egon-data"]["--run-pypsa-eur"]:
        subproc.run(
            [
                "snakemake",
                "-j1",
                "--directory",
                filepath,
                "--snakefile",
                filepath / "Snakefile",
                "--use-conda",
                "--conda-frontend=conda",
                "--cores",
                "8",
                "prepare",
            ]
        )
        execute()

        path = filepath / "pypsa-eur" / "results" / "prenetworks"

        path_2 = path / "prenetwork_post-manipulate_pre-solve"
        path_2.mkdir(parents=True, exist_ok=True)

        with open(
            __path__[0] + "/datasets/pypsaeur/config_prepare.yaml", "r"
        ) as stream:
            data_config = yaml.safe_load(stream)

        for i in range(0, len(data_config["scenario"]["planning_horizons"])):
            nc_file = (
                f"base_s_{data_config['scenario']['clusters'][0]}"
                f"_l{data_config['scenario']['ll'][0]}"
                f"_{data_config['scenario']['opts'][0]}"
                f"_{data_config['scenario']['sector_opts'][0]}"
                f"_{data_config['scenario']['planning_horizons'][i]}.nc"
            )

            shutil.copy(Path(path, nc_file), path_2)

    else:
        print("Pypsa-eur is not executed due to the settings of egon-data")


def prepare_network_2():
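    """Prepare the networks for the solve run of pypsa-eur.

    If the --run-pypsa-eur option is set, the egon-data specific solve
    configuration is copied into the pypsa-eur repository and the
    'prepare' rule of the snakemake workflow is executed again.

    Returns
    -------
    None.

    """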
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur"

    if config.settings()["egon-data"]["--run-pypsa-eur"]:
        shutil.copy(
            Path(__path__[0], "datasets", "pypsaeur", "config_solve.yaml"),
            filepath / "pypsa-eur" / "config" / "config.yaml",
        )

        subproc.run(
            [
                "snakemake",
                "-j1",
                "--directory",
                filepath,
                "--snakefile",
                filepath / "Snakefile",
                "--use-conda",
                "--conda-frontend=conda",
                "--cores",
                "8",
                "prepare",
            ]
        )
    else:
        print("Pypsa-eur is not executed due to the settings of egon-data")


def solve_network():
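    """Solve the prepared pypsa-eur networks via snakemake.

    If the --run-pypsa-eur option is set, the 'solve' rule of the
    snakemake workflow is executed, postprocessing_biomass_2045() is
    applied to the results, and the 'summary' rule is run afterwards.

    Returns
    -------
    None.

    """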
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur"

    if config.settings()["egon-data"]["--run-pypsa-eur"]:
        subproc.run(
            [
                "snakemake",
                "-j1",
                "--cores",
                "8",
                "--directory",
                filepath,
                "--snakefile",
                filepath / "Snakefile",
                "--use-conda",
                "--conda-frontend=conda",
                "solve",
            ]
        )

        postprocessing_biomass_2045()

        subproc.run(
            [
                "snakemake",
                "-j1",
                "--directory",
                filepath,
                "--snakefile",
                filepath / "Snakefile",
                "--use-conda",
                "--conda-frontend=conda",
                "summary",
            ]
        )
    else:
        print("Pypsa-eur is not executed due to the settings of egon-data")


def read_network(planning_horizon=2045):
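    """Read a solved pypsa-eur post-network for the given planning horizon.

    Depending on the --run-pypsa-eur option, the network is read either
    from the local pypsa-eur run or from the data bundle.

    Parameters
    ----------
    planning_horizon : int
        Planning horizon year of the network, e.g. 2045.

    Returns
    -------
    pypsa.Network
        Solved network for the selected planning horizon.
    """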
    if config.settings()["egon-data"]["--run-pypsa-eur"]:
        with open(
            __path__[0] + "/datasets/pypsaeur/config_solve.yaml", "r"
        ) as stream:
            data_config = yaml.safe_load(stream)

        target_file = (
            Path(".")
            / "run-pypsa-eur"
            / "pypsa-eur"
            / "results"
            / data_config["run"]["name"]
            / "postnetworks"
            / f"base_s_{data_config['scenario']['clusters'][0]}"
            f"_l{data_config['scenario']['ll'][0]}"
            f"_{data_config['scenario']['opts'][0]}"
            f"_{data_config['scenario']['sector_opts'][0]}"
            f"_{planning_horizon}.nc"
        )

    else:
        target_file = (
            Path(".")
            / "data_bundle_egon_data"
            / "pypsa_eur"
            / "postnetworks"
            / f"base_s_39_lc1.25__cb40ex0-T-H-I-B-solar+p3-dist1_{planning_horizon}.nc"
        )

    return pypsa.Network(target_file)


def clean_database():
    """Remove all components abroad for eGon100RE from the database

    Remove all components abroad and their associated time series from
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE {comp + "_id"} IN (
                SELECT {comp + "_id"} FROM {"grid.egon_etrago_" + comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    comp_2_ports = [
        "line",
        "link",
    ]

    for comp, id in zip(comp_2_ports, ["line_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE scn_name = '{scn_name}'
            AND {id} IN (
                SELECT {id} FROM {"grid.egon_etrago_" + comp}
            WHERE "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            );


            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE scn_name = '{scn_name}'
            AND "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'
                AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data))
            ;"""
        )

    db.execute_sql(
        f"""
        DELETE FROM grid.egon_etrago_bus
        WHERE scn_name = '{scn_name}'
        AND country <> 'DE'
        AND carrier <> 'AC'
        """
    )


def electrical_neighbours_egon100():
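    """Create the electrical neighbours for the eGon100RE scenario.

    If eGon100RE is in the list of created scenarios, the foreign
    components of the solved pypsa-eur run are reduced and written to
    the eTraGo tables by calling neighbor_reduction().

    Returns
    -------
    None.

    """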
    if "eGon100RE" in egon.data.config.settings()["egon-data"]["--scenarios"]:
        neighbor_reduction()

    else:
        print(
            "eGon100RE is not in the list of created scenarios, this task is skipped."
        )


def combine_decentral_and_rural_heat(network_solved, network_prepared):
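    """Merge urban decentral heat components into the rural heat carriers.

    For buses outside of Germany, urban decentral buses, links and
    stores of the solved network are either merged into the matching
    rural component or relabelled as rural. Urban decentral loads of
    the prepared network are added to the rural heat load time series.

    Parameters
    ----------
    network_solved : pypsa.Network
        Solved pypsa-eur network.
    network_prepared : pypsa.Network
        Prepared pypsa-eur network in hourly resolution.

    Returns
    -------
    network_prepared : pypsa.Network
        Updated prepared network.
    network_solved : pypsa.Network
        Updated solved network.
    """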

    for comp in network_solved.iterate_components():

        if comp.name in ["Bus", "Link", "Store"]:
            urban_decentral = comp.df[
                comp.df.carrier.str.contains("urban decentral")
            ]
            rural = comp.df[comp.df.carrier.str.contains("rural")]
            for i, row in urban_decentral.iterrows():
                if "DE" not in i:
                    if comp.name in ["Bus"]:
                        network_solved.remove("Bus", i)
                    if comp.name in ["Link", "Generator"]:
                        if (
                            i.replace("urban decentral", "rural")
                            in rural.index
                        ):
                            rural.loc[
                                i.replace("urban decentral", "rural"),
                                "p_nom_opt",
                            ] += urban_decentral.loc[i, "p_nom_opt"]
                            rural.loc[
                                i.replace("urban decentral", "rural"), "p_nom"
                            ] += urban_decentral.loc[i, "p_nom"]
                            network_solved.remove(comp.name, i)
                        else:
                            print(i)
                            comp.df.loc[i, "bus0"] = comp.df.loc[
                                i, "bus0"
                            ].replace("urban decentral", "rural")
                            comp.df.loc[i, "bus1"] = comp.df.loc[
                                i, "bus1"
                            ].replace("urban decentral", "rural")
                            comp.df.loc[i, "carrier"] = comp.df.loc[
                                i, "carrier"
                            ].replace("urban decentral", "rural")
                    if comp.name in ["Store"]:
                        if (
                            i.replace("urban decentral", "rural")
                            in rural.index
                        ):
                            rural.loc[
                                i.replace("urban decentral", "rural"),
                                "e_nom_opt",
                            ] += urban_decentral.loc[i, "e_nom_opt"]
                            rural.loc[
                                i.replace("urban decentral", "rural"), "e_nom"
                            ] += urban_decentral.loc[i, "e_nom"]
                            network_solved.remove(comp.name, i)

                        else:
                            print(i)
                            network_solved.stores.loc[i, "bus"] = (
                                network_solved.stores.loc[i, "bus"].replace(
                                    "urban decentral", "rural"
                                )
                            )
                            network_solved.stores.loc[i, "carrier"] = (
                                "rural water tanks"
                            )

    urban_decentral_loads = network_prepared.loads[
        network_prepared.loads.carrier.str.contains("urban decentral")
    ]

    for i, row in urban_decentral_loads.iterrows():
        if i in network_prepared.loads_t.p_set.columns:
            network_prepared.loads_t.p_set[
                i.replace("urban decentral", "rural")
            ] += network_prepared.loads_t.p_set[i]
    network_prepared.mremove("Load", urban_decentral_loads.index)

    return network_prepared, network_solved


def neighbor_reduction():
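    """Reduce the foreign part of the solved pypsa-eur network.

    Buses outside of Germany and its direct electrical neighbours are
    removed; cut cross-border flows are replaced by fixed slack loads.
    The remaining foreign buses, lines, links, generators, loads,
    stores and storage units are re-indexed and written to the eTraGo
    tables for the eGon100RE scenario.

    Returns
    -------
    None.

    """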
    network_solved = read_network(planning_horizon=2045)
    network_prepared = prepared_network(planning_horizon="2045")

    # network.links.drop("pipe_retrofit", axis="columns", inplace=True)

    wanted_countries = countries_list()

    foreign_buses = network_solved.buses[
        (~network_solved.buses.index.str.contains("|".join(wanted_countries)))
        | (network_solved.buses.index.str.contains("FR6"))
    ]
    network_solved.buses = network_solved.buses.drop(
        network_solved.buses.loc[foreign_buses.index].index
    )

    # Add H2 demand of Fischer-Tropsch process and methanolisation
    # to industrial H2 demands
    industrial_hydrogen = network_prepared.loads.loc[
        network_prepared.loads.carrier == "H2 for industry"
    ]
    fischer_tropsch = (
        network_solved.links_t.p0[
            network_solved.links.loc[
                network_solved.links.carrier == "Fischer-Tropsch"
            ].index
        ]
        .mul(network_solved.snapshot_weightings.generators, axis=0)
        .sum()
    )
    methanolisation = (
        network_solved.links_t.p0[
            network_solved.links.loc[
                network_solved.links.carrier == "methanolisation"
            ].index
        ]
        .mul(network_solved.snapshot_weightings.generators, axis=0)
        .sum()
    )
    for i, row in industrial_hydrogen.iterrows():
        network_prepared.loads.loc[i, "p_set"] += (
            fischer_tropsch[
                fischer_tropsch.index.str.startswith(row.bus[:5])
            ].sum()
            / 8760
        )
        network_prepared.loads.loc[i, "p_set"] += (
            methanolisation[
                methanolisation.index.str.startswith(row.bus[:5])
            ].sum()
            / 8760
        )

    # drop foreign lines whose both ends lie outside the kept buses
    network_solved.lines = network_solved.lines.drop(
        network_solved.lines[
            ~network_solved.lines["bus0"].isin(network_solved.buses.index)
            & ~network_solved.lines["bus1"].isin(network_solved.buses.index)
        ].index
    )

    # select all lines where only bus1 is among the kept buses
    lines_cb_1 = network_solved.lines[
        ~network_solved.lines["bus0"].isin(network_solved.buses.index)
    ]

    # create a load at bus1 with the line's hourly loading
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):

        # Copy loading of lines into hourly resolution
        pset = pd.Series(
            index=network_prepared.snapshots,
            data=network_solved.lines_t.p1[k].resample("H").ffill(),
        )
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]

        # Loads are all imported from the prepared network in the end
        network_prepared.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=pset,
            carrier=lines_cb_1.loc[k, "carrier"],
        )

    # select all lines where only bus0 is among the kept buses
    lines_cb_0 = network_solved.lines[
        ~network_solved.lines["bus1"].isin(network_solved.buses.index)
    ]

    # create a load at bus0 with the line's hourly loading
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
        # Copy loading of lines into hourly resolution
        pset = pd.Series(
            index=network_prepared.snapshots,
            data=network_solved.lines_t.p0[k].resample("H").ffill(),
        )
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]

        network_prepared.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=pset,
            carrier=lines_cb_0.loc[k, "carrier"],
        )

    # do the same for links
    network_solved.mremove(
        "Link",
        network_solved.links[
            (~network_solved.links.bus0.isin(network_solved.buses.index))
            | (~network_solved.links.bus1.isin(network_solved.buses.index))
        ].index,
    )

    # select all links where only bus1 is among the kept buses
    links_cb_1 = network_solved.links[
        ~network_solved.links["bus0"].isin(network_solved.buses.index)
    ]

    # create a load at bus1 with the link's hourly loading
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
        pset = pd.Series(
            index=network_prepared.snapshots,
            data=network_solved.links_t.p1[k].resample("H").ffill(),
        )
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]

        network_prepared.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=pset,
            carrier=links_cb_1.loc[k, "carrier"],
        )

    # select all links where only bus0 is among the kept buses
    links_cb_0 = network_solved.links[
        ~network_solved.links["bus1"].isin(network_solved.buses.index)
    ]

    # create a load at bus0 with the link's hourly loading
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
        pset = pd.Series(
            index=network_prepared.snapshots,
            data=network_solved.links_t.p0[k].resample("H").ffill(),
        )
        pset["2011-12-31 22:00:00"] = pset["2011-12-31 21:00:00"]
        pset["2011-12-31 23:00:00"] = pset["2011-12-31 21:00:00"]

        network_prepared.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=pset,
            carrier=links_cb_0.carrier[k],
        )

    # drop remaining foreign components
    for comp in network_solved.iterate_components():
        if "bus0" in comp.df.columns:
            network_solved.mremove(
                comp.name,
                comp.df[~comp.df.bus0.isin(network_solved.buses.index)].index,
            )
            network_solved.mremove(
                comp.name,
                comp.df[~comp.df.bus1.isin(network_solved.buses.index)].index,
            )
        elif "bus" in comp.df.columns:
            network_solved.mremove(
                comp.name,
                comp.df[~comp.df.bus.isin(network_solved.buses.index)].index,
            )

    # Combine urban decentral and rural heat
    network_prepared, network_solved = combine_decentral_and_rural_heat(
        network_solved, network_prepared
    )

    # writing components of neighboring countries to etrago tables

    # Set country tag for all buses
    network_solved.buses.country = network_solved.buses.index.str[:2]
    neighbors = network_solved.buses[network_solved.buses.country != "DE"]

    neighbors["new_index"] = (
        db.next_etrago_id("bus") + neighbors.reset_index().index
    )

    # Use index of AC buses created by electrical_neighbours
    foreign_ac_buses = db.select_dataframe(
        """
        SELECT * FROM grid.egon_etrago_bus
        WHERE carrier = 'AC' AND v_nom = 380
        AND country != 'DE' AND scn_name = 'eGon100RE'
        AND bus_id NOT IN (SELECT bus_i FROM osmtgmod_results.bus_data)
        """
    )
    buses_with_defined_id = neighbors[
        (neighbors.carrier == "AC")
        & (neighbors.country.isin(foreign_ac_buses.country.values))
    ].index
    neighbors.loc[buses_with_defined_id, "new_index"] = (
        foreign_ac_buses.set_index("x")
        .loc[neighbors.loc[buses_with_defined_id, "x"]]
        .bus_id.values
    )

    # lines, the foreign crossborder lines
    # (without crossborder lines to Germany!)

    neighbor_lines = network_solved.lines[
        network_solved.lines.bus0.isin(neighbors.index)
        & network_solved.lines.bus1.isin(neighbors.index)
    ]
    if not network_solved.lines_t["s_max_pu"].empty:
        neighbor_lines_t = network_prepared.lines_t["s_max_pu"][
            neighbor_lines.index
        ]

    neighbor_lines.reset_index(inplace=True)
    neighbor_lines.bus0 = (
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
    )
    neighbor_lines.bus1 = (
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
    )
    neighbor_lines.index += db.next_etrago_id("line")

    if not network_solved.lines_t["s_max_pu"].empty:
        for i in neighbor_lines_t.columns:
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)

    # links
    neighbor_links = network_solved.links[
        network_solved.links.bus0.isin(neighbors.index)
        & network_solved.links.bus1.isin(neighbors.index)
    ]

    neighbor_links.reset_index(inplace=True)
    neighbor_links.bus0 = (
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
    )
    neighbor_links.bus1 = (
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
    )
    neighbor_links.index += db.next_etrago_id("link")

    # generators
    neighbor_gens = network_solved.generators[
        network_solved.generators.bus.isin(neighbors.index)
    ]
    neighbor_gens_t = network_prepared.generators_t["p_max_pu"][
        neighbor_gens[
            neighbor_gens.index.isin(
                network_prepared.generators_t["p_max_pu"].columns
            )
        ].index
    ]

    gen_time = [
        "solar",
        "onwind",
        "solar rooftop",
        "offwind-ac",
        "offwind-dc",
        "solar-hsat",
        "urban central solar thermal",
        "rural solar thermal",
        "offwind-float",
    ]

    missing_gent = neighbor_gens[
        neighbor_gens["carrier"].isin(gen_time)
        & ~neighbor_gens.index.isin(neighbor_gens_t.columns)
    ].index

    gen_timeseries = network_prepared.generators_t["p_max_pu"].copy()
    for mgt in missing_gent:  # mgt: missing generator timeseries
        try:
            neighbor_gens_t[mgt] = gen_timeseries.loc[:, mgt[0:-5]]
        except KeyError:
            print(f"There are no timeseries for {mgt}")

    neighbor_gens.reset_index(inplace=True)
    neighbor_gens.bus = (
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
    )
    neighbor_gens.index += db.next_etrago_id("generator")

    for i in neighbor_gens_t.columns:
        new_index = neighbor_gens[neighbor_gens["Generator"] == i].index
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)

    # loads
    # imported from prenetwork in 1h-resolution
    neighbor_loads = network_prepared.loads[
        network_prepared.loads.bus.isin(neighbors.index)
    ]
    neighbor_loads_t_index = neighbor_loads.index[
        neighbor_loads.index.isin(network_prepared.loads_t.p_set.columns)
    ]
    neighbor_loads_t = network_prepared.loads_t["p_set"][
        neighbor_loads_t_index
    ]

    neighbor_loads.reset_index(inplace=True)
    neighbor_loads.bus = (
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
    )
    neighbor_loads.index += db.next_etrago_id("load")

    for i in neighbor_loads_t.columns:
        new_index = neighbor_loads[neighbor_loads["Load"] == i].index
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)

    # stores
    neighbor_stores = network_solved.stores[
        network_solved.stores.bus.isin(neighbors.index)
    ]
    neighbor_stores_t_index = neighbor_stores.index[
        neighbor_stores.index.isin(network_solved.stores_t.e_min_pu.columns)
    ]
    neighbor_stores_t = network_prepared.stores_t["e_min_pu"][
        neighbor_stores_t_index
    ]

    neighbor_stores.reset_index(inplace=True)
    neighbor_stores.bus = (
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
    )
    neighbor_stores.index += db.next_etrago_id("store")

    for i in neighbor_stores_t.columns:
        new_index = neighbor_stores[neighbor_stores["Store"] == i].index
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)

    # storage_units
    neighbor_storage = network_solved.storage_units[
        network_solved.storage_units.bus.isin(neighbors.index)
    ]
    neighbor_storage_t_index = neighbor_storage.index[
        neighbor_storage.index.isin(
            network_solved.storage_units_t.inflow.columns
        )
    ]
    neighbor_storage_t = network_prepared.storage_units_t["inflow"][
        neighbor_storage_t_index
    ]

    neighbor_storage.reset_index(inplace=True)
    neighbor_storage.bus = (
        neighbors.loc[neighbor_storage.bus, "new_index"]
        .reset_index()
        .new_index
    )
    neighbor_storage.index += db.next_etrago_id("storage")

    for i in neighbor_storage_t.columns:
        new_index = neighbor_storage[
            neighbor_storage["StorageUnit"] == i
        ].index
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)

    # Connect to local database
    engine = db.engine()

    neighbors["scn_name"] = "eGon100RE"
    neighbors.index = neighbors["new_index"]

    # Correct geometry for non AC buses
    carriers = set(neighbors.carrier.to_list())
    carriers = [e for e in carriers if e != "AC"]
    non_AC_neighbors = pd.DataFrame()
    for c in carriers:
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
            "location", drop=False
        )
        for i in ["x", "y"]:
            c_neighbors = c_neighbors.drop(i, axis=1)
        coordinates = neighbors[neighbors.carrier == "AC"][
            ["location", "x", "y"]
        ].set_index("location")
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
            "new_index", drop=False
        )
        non_AC_neighbors = pd.concat([non_AC_neighbors, c_neighbors])

    neighbors = pd.concat(
        [neighbors[neighbors.carrier == "AC"], non_AC_neighbors]
    )

    for i in [
        "new_index",
        "control",
        "generator",
        "location",
        "sub_network",
        "unit",
        "substation_lv",
        "substation_off",
    ]:
        neighbors = neighbors.drop(i, axis=1)

    # Add geometry column
    neighbors = (
        gpd.GeoDataFrame(
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
        )
        .rename_geometry("geom")
        .set_crs(4326)
    )

    # Unify carrier names
    neighbors.carrier = neighbors.carrier.str.replace(" ", "_")
    neighbors.carrier.replace(
        {
            "gas": "CH4",
            "gas_for_industry": "CH4_for_industry",
            "urban_central_heat": "central_heat",
            "EV_battery": "Li_ion",
            "urban_central_water_tanks": "central_heat_store",
            "rural_water_tanks": "rural_heat_store",
        },
        inplace=True,
    )

    neighbors[~neighbors.carrier.isin(["AC"])].to_postgis(
        "egon_etrago_bus",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="bus_id",
    )

    # prepare and write neighboring crossborder lines to etrago tables
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
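        """Prepare and write the foreign crossborder lines to eTraGo tables.

        Parameters
        ----------
        neighbor_lines : pandas.DataFrame
            Dataframe containing the neighboring crossborder lines.
        scn : str
            Name of the scenario.

        Returns
        -------
        None.

        """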
        neighbor_lines["scn_name"] = scn
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
            int
        )
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]

        for i in [
            "Line",
            "x_pu_eff",
            "r_pu_eff",
            "sub_network",
            "x_pu",
            "r_pu",
            "g_pu",
            "b_pu",
            "s_nom_opt",
            "i_nom",
            "dc",
        ]:
            neighbor_lines = neighbor_lines.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_lines = (
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
            "lifetime"
        ]["ac_ehv_overhead_line"]

        neighbor_lines.to_postgis(
            "egon_etrago_line",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )

    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE")

    def links_to_etrago(neighbor_links, scn="eGon100RE", extendable=True):
        """Prepare and write neighboring crossborder links to eTraGo table

        This function prepares the neighboring crossborder links
        generated by the PyPSA-eur-sec (p-e-s) run by:
          * Deleting the unneeded columns
          * If extendable is false (non-default case):
              * Replacing p_nom = 0 with the p_nom_opt values (arising
                from the p-e-s optimisation)
              * Setting p_nom_extendable to false
          * Adding geometry to the links: 'geom' and 'topo' columns
          * Changing the carrier names to be consistent with eGon-data

        The function then inserts the links into the eTraGo table and
        returns nothing.

        Parameters
        ----------
        neighbor_links : pandas.DataFrame
            Dataframe containing the neighboring crossborder links
        scn : str
            Name of the scenario
        extendable : bool
            Boolean expressing if the links should be extendable or not

        Returns
        -------
        None

        """
        neighbor_links["scn_name"] = scn

        dropped_carriers = [
            "Link",
            "geometry",
            "tags",
            "under_construction",
            "underground",
            "underwater_fraction",
            "bus2",
            "bus3",
            "bus4",
            "efficiency2",
            "efficiency3",
            "efficiency4",
            "lifetime",
            "pipe_retrofit",
            "committable",
            "start_up_cost",
            "shut_down_cost",
            "min_up_time",
            "min_down_time",
            "up_time_before",
            "down_time_before",
            "ramp_limit_up",
            "ramp_limit_down",
            "ramp_limit_start_up",
            "ramp_limit_shut_down",
            "length_original",
            "reversed",
            "location",
            "project_status",
            "dc",
            "voltage",
        ]

        if extendable:
            dropped_carriers.append("p_nom_opt")
            neighbor_links = neighbor_links.drop(
                columns=dropped_carriers,
                errors="ignore",
            )

        else:
            dropped_carriers.append("p_nom")
            dropped_carriers.append("p_nom_extendable")
            neighbor_links = neighbor_links.drop(
                columns=dropped_carriers,
                errors="ignore",
            )
            neighbor_links = neighbor_links.rename(
                columns={"p_nom_opt": "p_nom"}
            )
            neighbor_links["p_nom_extendable"] = False

        if neighbor_links.empty:
            print("No links selected")
            return

        # Define geometry and add to links dataframe as 'topo'
        gdf = gpd.GeoDataFrame(
            index=neighbor_links.index,
            data={
                "geom_bus0": neighbors.loc[neighbor_links.bus0, "geom"].values,
                "geom_bus1": neighbors.loc[neighbor_links.bus1, "geom"].values,
            },
        )

        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_links = (
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        # Unify carrier names
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")

        neighbor_links.carrier.replace(
            {
                "H2_Electrolysis": "power_to_H2",
                "H2_Fuel_Cell": "H2_to_power",
                "H2_pipeline_retrofitted": "H2_retrofit",
                "SMR": "CH4_to_H2",
                "Sabatier": "H2_to_CH4",
                "gas_for_industry": "CH4_for_industry",
                "gas_pipeline": "CH4",
                "urban_central_gas_boiler": "central_gas_boiler",
                "urban_central_resistive_heater": "central_resistive_heater",
                "urban_central_water_tanks_charger": "central_heat_store_charger",
                "urban_central_water_tanks_discharger": "central_heat_store_discharger",
                "rural_water_tanks_charger": "rural_heat_store_charger",
                "rural_water_tanks_discharger": "rural_heat_store_discharger",
                "urban_central_gas_CHP": "central_gas_CHP",
                "urban_central_air_heat_pump": "central_heat_pump",
                "rural_ground_heat_pump": "rural_heat_pump",
            },
            inplace=True,
        )

        H2_links = {
            "H2_to_CH4": "H2_to_CH4",
            "H2_to_power": "H2_to_power",
            "power_to_H2": "power_to_H2_system",
            "CH4_to_H2": "CH4_to_H2",
        }

        for c in H2_links.keys():

            neighbor_links.loc[
                (neighbor_links.carrier == c),
                "lifetime",
            ] = get_sector_parameters("gas", "eGon100RE")["lifetime"][
                H2_links[c]
            ]

        neighbor_links.to_postgis(
            "egon_etrago_link",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="link_id",
        )
1342
    extendable_links_carriers = [
1343
        "battery charger",
1344
        "battery discharger",
1345
        "home battery charger",
1346
        "home battery discharger",
1347
        "rural water tanks charger",
1348
        "rural water tanks discharger",
1349
        "urban central water tanks charger",
1350
        "urban central water tanks discharger",
1351
        "urban decentral water tanks charger",
1352
        "urban decentral water tanks discharger",
1353
        "H2 Electrolysis",
1354
        "H2 Fuel Cell",
1355
        "SMR",
1356
        "Sabatier",
1357
    ]
1358
1359
    # delete unwanted carriers for eTraGo
1360
    excluded_carriers = [
1361
        "gas for industry CC",
1362
        "SMR CC",
1363
        "DAC",
1364
    ]
1365
    neighbor_links = neighbor_links[
1366
        ~neighbor_links.carrier.isin(excluded_carriers)
1367
    ]
1368
1369
    # Combine CHP_CC and CHP
1370
    chp_cc = neighbor_links[
1371
        neighbor_links.carrier == "urban central gas CHP CC"
1372
    ]
1373
    for index, row in chp_cc.iterrows():
1374
        neighbor_links.loc[
1375
            neighbor_links.Link == row.Link.replace("CHP CC", "CHP"),
1376
            "p_nom_opt",
1377
        ] += row.p_nom_opt
1378
        neighbor_links.loc[
1379
            neighbor_links.Link == row.Link.replace("CHP CC", "CHP"), "p_nom"
1380
        ] += row.p_nom
1381
        neighbor_links.drop(index, inplace=True)
1382
1383
    # Combine heat pumps
1384
    # Like in Germany, there are air heat pumps in central heat grids
1385
    # and ground heat pumps in rural areas
1386
    rural_air = neighbor_links[neighbor_links.carrier == "rural air heat pump"]
1387
    for index, row in rural_air.iterrows():
1388
        neighbor_links.loc[
1389
            neighbor_links.Link == row.Link.replace("air", "ground"),
1390
            "p_nom_opt",
1391
        ] += row.p_nom_opt
1392
        neighbor_links.loc[
1393
            neighbor_links.Link == row.Link.replace("air", "ground"), "p_nom"
1394
        ] += row.p_nom
1395
        neighbor_links.drop(index, inplace=True)
1396
    links_to_etrago(
1397
        neighbor_links[neighbor_links.carrier.isin(extendable_links_carriers)],
1398
        "eGon100RE",
1399
    )
1400
    links_to_etrago(
1401
        neighbor_links[
1402
            ~neighbor_links.carrier.isin(extendable_links_carriers)
1403
        ],
1404
        "eGon100RE",
1405
        extendable=False,
1406
    )
1407
    # Include links time-series
1408
    # For heat_pumps
1409
    hp = neighbor_links[neighbor_links["carrier"].str.contains("heat pump")]
1410
1411
    neighbor_eff_t = network_prepared.links_t["efficiency"][
1412
        hp[hp.Link.isin(network_prepared.links_t["efficiency"].columns)].index
1413
    ]
1414
1415
    missing_hp = hp[~hp["Link"].isin(neighbor_eff_t.columns)].Link
1416
1417
    eff_timeseries = network_prepared.links_t["efficiency"].copy()
1418
    for met in missing_hp:  # met: missing efficiency timeseries
1419
        try:
1420
            neighbor_eff_t[met] = eff_timeseries.loc[:, met[0:-5]]
1421
        except:
1422
            print(f"There are not timeseries for heat_pump {met}")
1423
1424
    for i in neighbor_eff_t.columns:
1425
        new_index = neighbor_links[neighbor_links["Link"] == i].index
1426
        neighbor_eff_t.rename(columns={i: new_index[0]}, inplace=True)
1427
1428
    # Include links time-series
1429
    # For ev_chargers
1430
    ev = neighbor_links[neighbor_links["carrier"].str.contains("BEV charger")]
1431
1432
    ev_p_max_pu = network_prepared.links_t["p_max_pu"][
1433
        ev[ev.Link.isin(network_prepared.links_t["p_max_pu"].columns)].index
1434
    ]
1435
1436
    missing_ev = ev[~ev["Link"].isin(ev_p_max_pu.columns)].Link
1437
1438
    ev_p_max_pu_timeseries = network_prepared.links_t["p_max_pu"].copy()
1439
    for mct in missing_ev:  # evt: missing charger timeseries
1440
        try:
1441
            ev_p_max_pu[mct] = ev_p_max_pu_timeseries.loc[:, mct[0:-5]]
1442
        except:
1443
            print(f"There are not timeseries for EV charger {mct}")
1444
1445
    for i in ev_p_max_pu.columns:
1446
        new_index = neighbor_links[neighbor_links["Link"] == i].index
1447
        ev_p_max_pu.rename(columns={i: new_index[0]}, inplace=True)
1448
1449
    # prepare neighboring generators for etrago tables
1450
    neighbor_gens["scn_name"] = "eGon100RE"
1451
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
1452
    neighbor_gens["p_nom_extendable"] = False
1453
1454
    # Unify carrier names
1455
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")
1456
1457
    neighbor_gens.carrier.replace(
1458
        {
1459
            "onwind": "wind_onshore",
1460
            "ror": "run_of_river",
1461
            "offwind-ac": "wind_offshore",
1462
            "offwind-dc": "wind_offshore",
1463
            "offwind-float": "wind_offshore",
1464
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
1465
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
1466
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
1467
            "solar-hsat": "solar",
1468
        },
1469
        inplace=True,
1470
    )
1471
1472
    for i in [
1473
        "Generator",
1474
        "weight",
1475
        "lifetime",
1476
        "p_set",
1477
        "q_set",
1478
        "p_nom_opt",
1479
        "e_sum_min",
1480
        "e_sum_max",
1481
    ]:
1482
        neighbor_gens = neighbor_gens.drop(i, axis=1)
1483
1484
    neighbor_gens.to_sql(
1485
        "egon_etrago_generator",
1486
        engine,
1487
        schema="grid",
1488
        if_exists="append",
1489
        index=True,
1490
        index_label="generator_id",
1491
    )
1492
1493
    # prepare neighboring loads for etrago tables
1494
    neighbor_loads["scn_name"] = "eGon100RE"
1495
1496
    # Unify carrier names
1497
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")
1498
1499
    neighbor_loads.carrier.replace(
1500
        {
1501
            "electricity": "AC",
1502
            "DC": "AC",
1503
            "industry_electricity": "AC",
1504
            "H2_pipeline_retrofitted": "H2_system_boundary",
1505
            "gas_pipeline": "CH4_system_boundary",
1506
            "gas_for_industry": "CH4_for_industry",
1507
            "urban_central_heat": "central_heat",
1508
        },
1509
        inplace=True,
1510
    )
1511
1512
    neighbor_loads = neighbor_loads.drop(
1513
        columns=["Load"],
1514
        errors="ignore",
1515
    )
1516
1517
    neighbor_loads.to_sql(
1518
        "egon_etrago_load",
1519
        engine,
1520
        schema="grid",
1521
        if_exists="append",
1522
        index=True,
1523
        index_label="load_id",
1524
    )
1525
1526
    # prepare neighboring stores for etrago tables
    neighbor_stores["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")

    neighbor_stores.carrier.replace(
        {
            "Li_ion": "battery",
            "gas": "CH4",
            "urban_central_water_tanks": "central_heat_store",
            "rural_water_tanks": "rural_heat_store",
            "EV_battery": "battery_storage",
        },
        inplace=True,
    )
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max <= 1e9)
            & (neighbor_stores.carrier == "H2_Store")
        ),
        "carrier",
    ] = "H2_underground"
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max > 1e9)
            & (neighbor_stores.carrier == "H2_Store")
        ),
        "carrier",
    ] = "H2_overground"

    for i in [
        "Store",
        "p_set",
        "q_set",
        "e_nom_opt",
        "lifetime",
        "e_initial_per_period",
        "e_cyclic_per_period",
        "location",
    ]:
        neighbor_stores = neighbor_stores.drop(i, axis=1, errors="ignore")

    for c in ["H2_underground", "H2_overground"]:
        neighbor_stores.loc[
            (neighbor_stores.carrier == c),
            "lifetime",
        ] = get_sector_parameters("gas", "eGon100RE")["lifetime"][c]

    neighbor_stores.to_sql(
        "egon_etrago_store",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # prepare neighboring storage_units for etrago tables
    neighbor_storage["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")

    neighbor_storage.carrier.replace(
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
    )

    for i in [
        "StorageUnit",
        "p_nom_opt",
        "state_of_charge_initial_per_period",
        "cyclic_state_of_charge_per_period",
    ]:
        neighbor_storage = neighbor_storage.drop(i, axis=1, errors="ignore")

    neighbor_storage.to_sql(
        "egon_etrago_storage",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring loads_t p_sets to etrago tables

    neighbor_loads_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_set"],
        index=neighbor_loads_t.columns,
    )
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
    neighbor_loads_t_etrago["temp_id"] = 1
    for i in neighbor_loads_t.columns:
        neighbor_loads_t_etrago.at[i, "p_set"] = neighbor_loads_t[
            i
        ].values.tolist()

    neighbor_loads_t_etrago.to_sql(
        "egon_etrago_load_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # writing neighboring link_t efficiency and p_max_pu to etrago tables
    neighbor_link_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_max_pu", "efficiency"],
        index=neighbor_eff_t.columns.to_list() + ev_p_max_pu.columns.to_list(),
    )
    neighbor_link_t_etrago["scn_name"] = "eGon100RE"
    neighbor_link_t_etrago["temp_id"] = 1
    for i in neighbor_eff_t.columns:
        neighbor_link_t_etrago.at[i, "efficiency"] = neighbor_eff_t[
            i
        ].values.tolist()
    for i in ev_p_max_pu.columns:
        neighbor_link_t_etrago.at[i, "p_max_pu"] = ev_p_max_pu[
            i
        ].values.tolist()

    neighbor_link_t_etrago.to_sql(
        "egon_etrago_link_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="link_id",
    )

    # writing neighboring generator_t p_max_pu to etrago tables
    neighbor_gens_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_max_pu"],
        index=neighbor_gens_t.columns,
    )
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
    neighbor_gens_t_etrago["temp_id"] = 1
    for i in neighbor_gens_t.columns:
        neighbor_gens_t_etrago.at[i, "p_max_pu"] = neighbor_gens_t[
            i
        ].values.tolist()

    neighbor_gens_t_etrago.to_sql(
        "egon_etrago_generator_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # writing neighboring stores_t e_min_pu to etrago tables
    neighbor_stores_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "e_min_pu"],
        index=neighbor_stores_t.columns,
    )
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
    neighbor_stores_t_etrago["temp_id"] = 1
    for i in neighbor_stores_t.columns:
        neighbor_stores_t_etrago.at[i, "e_min_pu"] = neighbor_stores_t[
            i
        ].values.tolist()

    neighbor_stores_t_etrago.to_sql(
        "egon_etrago_store_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # writing neighboring storage_units inflow to etrago tables
    neighbor_storage_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "inflow"],
        index=neighbor_storage_t.columns,
    )
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
    neighbor_storage_t_etrago["temp_id"] = 1
    for i in neighbor_storage_t.columns:
        neighbor_storage_t_etrago.at[i, "inflow"] = neighbor_storage_t[
            i
        ].values.tolist()

    neighbor_storage_t_etrago.to_sql(
        "egon_etrago_storage_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring lines_t s_max_pu to etrago tables
    if not network_solved.lines_t["s_max_pu"].empty:
        neighbor_lines_t_etrago = pd.DataFrame(
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
        )
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"

        for i in neighbor_lines_t.columns:
            neighbor_lines_t_etrago.at[i, "s_max_pu"] = neighbor_lines_t[
                i
            ].values.tolist()

        neighbor_lines_t_etrago.to_sql(
            "egon_etrago_line_timeseries",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )
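

# The blocks above all follow the same pattern: each component's hourly
# series is packed into a single list-valued cell so it fits the etrago
# *_timeseries table layout (one row per component, the whole year in one
# array column). The following is a minimal, self-contained sketch of that
# pattern on toy data; `_demo_pack_timeseries` is purely illustrative and
# not used by the pipeline.
def _demo_pack_timeseries():
    # toy per-component hourly series (columns = component names);
    # pd is the module-level pandas import
    ts = pd.DataFrame(
        {"link_1": [0.1, 0.2, 0.3], "link_2": [1.0, 0.9, 0.8]}
    )

    packed = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_set"], index=ts.columns
    )
    packed["scn_name"] = "eGon100RE"
    packed["temp_id"] = 1
    for i in ts.columns:
        # one list-valued cell per component, as expected by etrago
        packed.at[i, "p_set"] = ts[i].values.tolist()

    return packed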


def prepared_network(planning_horizon=3):
    """Import the prenetwork prepared by pypsa-eur

    Parameters
    ----------
    planning_horizon : int, optional
        Index of the planning horizon within the pypsa-eur scenario
        config. The default is 3.

    Returns
    -------
    pypsa.Network
        Prepared network

    """
    if egon.data.config.settings()["egon-data"]["--run-pypsa-eur"]:
        with open(
            __path__[0] + "/datasets/pypsaeur/config_prepare.yaml", "r"
        ) as stream:
            data_config = yaml.safe_load(stream)

        target_file = (
            Path(".")
            / "run-pypsa-eur"
            / "pypsa-eur"
            / "results"
            / data_config["run"]["name"]
            / "prenetworks"
            / f"base_s_{data_config['scenario']['clusters'][0]}"
            f"_l{data_config['scenario']['ll'][0]}"
            f"_{data_config['scenario']['opts'][0]}"
            f"_{data_config['scenario']['sector_opts'][0]}"
            f"_{data_config['scenario']['planning_horizons'][planning_horizon]}.nc"
        )

    else:
        target_file = (
            Path(".")
            / "data_bundle_egon_data"
            / "pypsa_eur"
            / "prenetworks"
            / "prenetwork_post-manipulate_pre-solve"
            / "base_s_39_lc1.25__cb40ex0-T-H-I-B-solar+p3-dist1_2045.nc"
        )

    return pypsa.Network(target_file.absolute().as_posix())
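

# A minimal usage sketch: load the prenetwork and inspect it. This assumes
# the file referenced above exists, i.e. that either pypsa-eur has been run
# or the data bundle is in place. `_demo_inspect_prepared_network` is
# illustrative only.
def _demo_inspect_prepared_network():
    n = prepared_network()
    print(len(n.buses), "buses,", len(n.links), "links")
    return n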


def overwrite_H2_pipeline_share():
    """Overwrite retrofitted_CH4pipeline-to-H2pipeline_share value

    Overwrite retrofitted_CH4pipeline-to-H2pipeline_share in the
    scenario parameter table if pypsa-eur-sec is run.
    This function writes to the database and has no return value.

    """
    scn_name = "eGon100RE"
    # Select source and target from dataset configuration
    target = egon.data.config.datasets()["pypsa-eur-sec"]["target"]

    n = read_network(planning_horizon=2045)

    H2_pipelines = n.links[n.links["carrier"] == "H2 pipeline retrofitted"]
    CH4_pipelines = n.links[n.links["carrier"] == "gas pipeline"]
    H2_pipes_share = np.mean(
        [
            (i / j)
            for i, j in zip(
                H2_pipelines.p_nom_opt.to_list(), CH4_pipelines.p_nom.to_list()
            )
        ]
    )
    logger.info(
        "retrofitted_CH4pipeline-to-H2pipeline_share = " + str(H2_pipes_share)
    )

    parameters = db.select_dataframe(
        f"""
        SELECT *
        FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        WHERE name = '{scn_name}'
        """
    )

    gas_param = parameters.loc[0, "gas_parameters"]
    gas_param["retrofitted_CH4pipeline-to-H2pipeline_share"] = H2_pipes_share
    gas_param = json.dumps(gas_param)

    # Update data in db
    db.execute_sql(
        f"""
    UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
    SET gas_parameters = '{gas_param}'
    WHERE name = '{scn_name}';
    """
    )
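

# `overwrite_H2_pipeline_share` computes the mean of the per-pipeline
# ratios p_nom_opt(H2) / p_nom(CH4), not the ratio of the capacity sums.
# A toy example with made-up capacities illustrates the difference;
# `_demo_pipeline_share` is illustrative only.
def _demo_pipeline_share():
    h2_p_nom_opt = [10.0, 30.0]
    ch4_p_nom = [100.0, 50.0]
    mean_of_ratios = np.mean(
        [i / j for i, j in zip(h2_p_nom_opt, ch4_p_nom)]
    )  # (0.1 + 0.6) / 2 = 0.35
    ratio_of_sums = sum(h2_p_nom_opt) / sum(ch4_p_nom)  # 40 / 150 = 0.267
    return mean_of_ratios, ratio_of_sums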


def update_electrical_timeseries_germany(network):
    """Replace electrical demand time series in Germany with data from egon-data

    Parameters
    ----------
    network : pypsa.Network
        Network including demand time series from pypsa-eur

    Returns
    -------
    network : pypsa.Network
        Network including electrical demand time series in Germany from egon-data

    """
    year = network.year
    skip = network.snapshot_weightings.objective.iloc[0].astype("int")
    df = pd.read_csv(
        "input-pypsa-eur-sec/electrical_demand_timeseries_DE_eGon100RE.csv"
    )

    annual_demand = pd.Series(index=[2019, 2037])
    annual_demand_industry = pd.Series(index=[2019, 2037])
    # Define values from status2019 for interpolation
    # Residential and service (in TWh)
    annual_demand.loc[2019] = 124.71 + 143.26
    # Industry (in TWh)
    annual_demand_industry.loc[2019] = 241.925

    # Define values from NEP 2023 scenario B 2037 for interpolation
    # Residential and service (in TWh)
    annual_demand.loc[2037] = 104 + 153.1
    # Industry (in TWh)
    annual_demand_industry.loc[2037] = 334.0

    # Set interpolated demands for years before 2037
    if year < 2037:
        # Calculate annual demands for year by linearly interpolating
        # between 2019 and 2037.
        # Done separately for industry and for residential and service
        # to fit pypsa-eur's structure.
        annual_rate = (annual_demand.loc[2037] - annual_demand.loc[2019]) / (
            2037 - 2019
        )
        annual_demand_year = annual_demand.loc[2019] + annual_rate * (
            year - 2019
        )

        annual_rate_industry = (
            annual_demand_industry.loc[2037] - annual_demand_industry.loc[2019]
        ) / (2037 - 2019)
        annual_demand_year_industry = annual_demand_industry.loc[
            2019
        ] + annual_rate_industry * (year - 2019)

        # Scale time series for the 100% scenario with the annual demands.
        # The shape of the curve is taken from the 100% scenario since the
        # same weather and calendar year is used there.
        network.loads_t.p_set.loc[:, "DE0 0"] = (
            df["residential_and_service"].loc[::skip]
            / df["residential_and_service"].sum()
            * annual_demand_year
            * 1e6
        ).values

        network.loads_t.p_set.loc[:, "DE0 0 industry electricity"] = (
            df["industry"].loc[::skip]
            / df["industry"].sum()
            * annual_demand_year_industry
            * 1e6
        ).values

    elif year == 2045:
        # use .values to align positionally with the snapshot index,
        # as in the industry assignment below
        network.loads_t.p_set.loc[:, "DE0 0"] = (
            df["residential_and_service"].loc[::skip].values
        )

        network.loads_t.p_set.loc[:, "DE0 0 industry electricity"] = (
            df["industry"].loc[::skip].values
        )

    else:
        print(
            "Scaling not implemented for years between 2037 and 2045 "
            "or beyond 2045."
        )
        return

    network.loads.loc["DE0 0 industry electricity", "p_set"] = 0.0

    return network
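

# A worked example of the interpolation above, using the hard-coded anchor
# values for residential and service demand: for year = 2025 the annual
# demand is 267.97 TWh + (257.1 - 267.97) TWh / 18 a * 6 a ~= 264.35 TWh.
# `_demo_interpolated_demand` is illustrative only.
def _demo_interpolated_demand(year=2025):
    demand_2019 = 124.71 + 143.26  # TWh, status2019
    demand_2037 = 104 + 153.1  # TWh, NEP 2023 scenario B 2037
    annual_rate = (demand_2037 - demand_2019) / (2037 - 2019)
    return demand_2019 + annual_rate * (year - 2019)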


def geothermal_district_heating(network):
    """Add the option to build geothermal power plants in district heating in Germany

    Parameters
    ----------
    network : pypsa.Network
        Network from pypsa-eur without geothermal generators

    Returns
    -------
    network : pypsa.Network
        Updated network with geothermal generators

    """

    costs_and_potentials = pd.read_csv(
        "input-pypsa-eur-sec/geothermal_potential_germany.csv"
    )

    network.add("Carrier", "urban central geo thermal")

    for i, row in costs_and_potentials.iterrows():
        # Set the lifetime of geothermal plants to 30 years, based on:
        # "Ableitung eines Korridors für den Ausbau der erneuerbaren Wärme
        # im Gebäudebereich", Beuth Hochschule für Technik, Berlin and
        # ifeu - Institut für Energie- und Umweltforschung Heidelberg GmbH,
        # February 2017
        lifetime_geothermal = 30

        network.add(
            "Generator",
            f"DE0 0 urban central geo thermal {i}",
            bus="DE0 0 urban central heat",
            carrier="urban central geo thermal",
            p_nom_extendable=True,
            p_nom_max=row["potential [MW]"],
            capital_cost=annualize_capital_costs(
                row["cost [EUR/kW]"] * 1e6, lifetime_geothermal, 0.07
            ),
        )
    return network
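

# `annualize_capital_costs` is imported from the scenario parameter module;
# a minimal sketch of a standard annuity calculation, assuming it follows
# the usual annuity factor p / (1 - (1 + p) ** -n) for overnight cost,
# lifetime n and interest rate p. `_demo_annuity` is illustrative only.
def _demo_annuity(overnight_cost, n, p):
    # e.g. _demo_annuity(1000.0, 30, 0.07) ~= 80.59 per year
    return overnight_cost * p / (1 - (1 + p) ** -n)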


def h2_overground_stores(network):
    """Add hydrogen overground stores to each hydrogen node

    In pypsa-eur, only countries without the potential for underground
    hydrogen stores have the option to build overground hydrogen tanks.
    Overground stores are more expensive, but are not restricted by the
    geological potential. To allow higher hydrogen store capacities in each
    country, optional hydrogen overground tanks are also added to nodes
    with a potential for underground stores.

    Parameters
    ----------
    network : pypsa.Network
        Network without hydrogen overground stores at each hydrogen node

    Returns
    -------
    network : pypsa.Network
        Network with hydrogen overground stores at each hydrogen node

    """

    underground_h2_stores = network.stores[
        (network.stores.carrier == "H2 Store")
        & (network.stores.e_nom_max != np.inf)
    ]

    overground_h2_stores = network.stores[
        (network.stores.carrier == "H2 Store")
        & (network.stores.e_nom_max == np.inf)
    ]

    network.madd(
        "Store",
        underground_h2_stores.bus + " overground Store",
        bus=underground_h2_stores.bus.values,
        e_nom_extendable=True,
        e_cyclic=True,
        carrier="H2 Store",
        capital_cost=overground_h2_stores.capital_cost.mean(),
    )

    return network


def update_heat_timeseries_germany(network):
    """Replace heat demand time series in Germany with data from egon-data

    Parameters
    ----------
    network : pypsa.Network
        Network including heat demand time series from pypsa-eur

    Returns
    -------
    network : pypsa.Network
        Network including heat demand time series in Germany from egon-data

    """
    # Import heat demand curves for Germany from eGon-data
    df_egon_heat_demand = pd.read_csv(
        "input-pypsa-eur-sec/heat_demand_timeseries_DE_eGon100RE.csv"
    )

    # Replace heat demand curves in Germany with values from eGon-data
    network.loads_t.p_set.loc[:, "DE1 0 rural heat"] = (
        df_egon_heat_demand.loc[:, "residential rural"].values
        + df_egon_heat_demand.loc[:, "service rural"].values
    )

    network.loads_t.p_set.loc[:, "DE1 0 urban central heat"] = (
        df_egon_heat_demand.loc[:, "urban central"].values
    )

    return network


def drop_biomass(network):
    """Remove all components whose name contains 'biomass' from the network"""
    carrier = "biomass"

    for c in network.iterate_components():
        network.mremove(c.name, c.df[c.df.index.str.contains(carrier)].index)
    return network


def postprocessing_biomass_2045():
    """Drop biomass components from the solved 2045 network and re-export it"""

    network = read_network(planning_horizon=2045)
    network = drop_biomass(network)

    with open(
        __path__[0] + "/datasets/pypsaeur/config_solve.yaml", "r"
    ) as stream:
        data_config = yaml.safe_load(stream)

    target_file = (
        Path(".")
        / "run-pypsa-eur"
        / "pypsa-eur"
        / "results"
        / data_config["run"]["name"]
        / "postnetworks"
        / f"base_s_{data_config['scenario']['clusters'][0]}"
        f"_l{data_config['scenario']['ll'][0]}"
        f"_{data_config['scenario']['opts'][0]}"
        f"_{data_config['scenario']['sector_opts'][0]}"
        f"_{data_config['scenario']['planning_horizons'][3]}.nc"
    )

    network.export_to_netcdf(target_file)


def drop_urban_decentral_heat(network):
    carrier = "urban decentral heat"

    # Add urban decentral heat demand to urban central heat demand
    for country in network.loads.loc[
        network.loads.carrier == carrier, "bus"
    ].str[:5]:

        if f"{country} {carrier}" in network.loads_t.p_set.columns:
            network.loads_t.p_set[
                f"{country} rural heat"
            ] += network.loads_t.p_set[f"{country} {carrier}"]
        else:
            print(
                f"""No time series available for {country} {carrier}.
                  Using static p_set."""
            )

            network.loads_t.p_set[
                f"{country} rural heat"
            ] += network.loads.loc[f"{country} {carrier}", "p_set"]

    # In some cases, low-temperature heat for industry is connected to the
    # urban decentral heat bus since there is no urban central heat bus.
    # These loads are reconnected to the representative rural heat bus:
    network.loads.loc[
        (network.loads.bus.str.contains(carrier))
        & (~network.loads.carrier.str.contains(carrier.replace(" heat", ""))),
        "bus",
    ] = network.loads.loc[
        (network.loads.bus.str.contains(carrier))
        & (~network.loads.carrier.str.contains(carrier.replace(" heat", ""))),
        "bus",
    ].str.replace(
        "urban decentral", "rural"
    )

    # Drop components attached to urban decentral heat
    for c in network.iterate_components():
        network.mremove(
            c.name, c.df[c.df.index.str.contains("urban decentral")].index
        )

    return network


def district_heating_shares(network):
    df = pd.read_csv(
        "data_bundle_powerd_data/district_heating_shares_egon.csv"
    ).set_index("country_code")

    heat_demand_per_country = (
        network.loads_t.p_set[
            network.loads[
                (network.loads.carrier.str.contains("heat"))
                & network.loads.index.isin(network.loads_t.p_set.columns)
            ].index
        ]
        .groupby(network.loads.bus.str[:5], axis=1)
        .sum()
    )

    for country in heat_demand_per_country.columns:
        network.loads_t.p_set[f"{country} urban central heat"] = (
            heat_demand_per_country.loc[:, country].mul(
                df.loc[country[:2]].values[0]
            )
        )
        network.loads_t.p_set[f"{country} rural heat"] = (
            heat_demand_per_country.loc[:, country].mul(
                (1 - df.loc[country[:2]].values[0])
            )
        )

    # Drop links with undefined buses or carrier
    network.mremove(
        "Link",
        network.links[
            ~network.links.bus0.isin(network.buses.index.values)
        ].index,
    )
    network.mremove(
        "Link",
        network.links[network.links.carrier == ""].index,
    )

    return network
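

# A small numeric example of the split above: with a district heating
# share of 0.3 and a total heat demand of 100 MW in a given hour, the
# urban central heat load gets 30 MW and the rural heat load the remaining
# 70 MW. `_demo_heat_split` is illustrative only.
def _demo_heat_split(total_demand_mw=100.0, dh_share=0.3):
    urban_central = total_demand_mw * dh_share  # 30.0
    rural = total_demand_mw * (1 - dh_share)  # 70.0
    return urban_central, rural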


def drop_new_gas_pipelines(network):
    network.mremove(
        "Link",
        network.links[
            network.links.index.str.contains("gas pipeline new")
        ].index,
    )

    return network


def drop_fossil_gas(network):
    network.mremove(
        "Generator",
        network.generators[network.generators.carrier == "gas"].index,
    )

    return network


def drop_conventional_power_plants(network):

    # Drop lignite and coal power plants in Germany
    network.mremove(
        "Link",
        network.links[
            (network.links.carrier.isin(["coal", "lignite"]))
            & (network.links.bus1.str.startswith("DE"))
        ].index,
    )

    return network


def rual_heat_technologies(network):
    """Drop rural gas boilers and rural solar thermal from the network"""
    network.mremove(
        "Link",
        network.links[
            network.links.index.str.contains("rural gas boiler")
        ].index,
    )

    network.mremove(
        "Generator",
        network.generators[
            network.generators.carrier.str.contains("rural solar thermal")
        ].index,
    )

    return network


def coal_exit_D():
    """Force German coal and lignite power plants to retire by 2034"""

    df = pd.read_csv(
        "run-pypsa-eur/pypsa-eur/resources/powerplants_s_39.csv", index_col=0
    )
    df_de_coal = df[
        (df.Country == "DE")
        & ((df.Fueltype == "Lignite") | (df.Fueltype == "Hard Coal"))
    ]
    df_de_coal.loc[df_de_coal.DateOut.values >= 2035, "DateOut"] = 2034
    df.loc[df_de_coal.index] = df_de_coal

    df.to_csv("run-pypsa-eur/pypsa-eur/resources/powerplants_s_39.csv")


def offwind_potential_D(network, capacity_per_sqkm=4):
    """Rescale the German offshore wind potential

    Set p_nom_max for German offshore generators based on
    capacity_per_sqkm = 4 instead of the default value of 2, which is
    applied for the rest of Europe.
    """

    offwind_ac_factor = 1942
    offwind_dc_factor = 10768
    offwind_float_factor = 134

    network.generators.loc[
        (network.generators.bus == "DE0 0")
        & (network.generators.carrier == "offwind-ac"),
        "p_nom_max",
    ] = (
        offwind_ac_factor * capacity_per_sqkm
    )
    network.generators.loc[
        (network.generators.bus == "DE0 0")
        & (network.generators.carrier == "offwind-dc"),
        "p_nom_max",
    ] = (
        offwind_dc_factor * capacity_per_sqkm
    )
    network.generators.loc[
        (network.generators.bus == "DE0 0")
        & (network.generators.carrier == "offwind-float"),
        "p_nom_max",
    ] = (
        offwind_float_factor * capacity_per_sqkm
    )

    return network
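

# With the factors above, the German offshore potentials scale linearly
# with `capacity_per_sqkm`, e.g. for the default of 4:
# offwind-ac: 1942 * 4 = 7768, offwind-dc: 10768 * 4 = 43072,
# offwind-float: 134 * 4 = 536 (in the unit of p_nom_max, MW).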


def additional_grid_expansion_2045(network):
    """Increase the constant of the lc_limit global constraint by 5 %"""

    network.global_constraints.loc["lc_limit", "constant"] *= 1.05

    return network


def execute():
    if egon.data.config.settings()["egon-data"]["--run-pypsa-eur"]:
        with open(
            __path__[0] + "/datasets/pypsaeur/config.yaml", "r"
        ) as stream:
            data_config = yaml.safe_load(stream)

        if data_config["foresight"] == "myopic":

            print("Adjusting scenarios on the myopic pathway...")

            coal_exit_D()

            networks = pd.Series()

            for i in range(
                0, len(data_config["scenario"]["planning_horizons"])
            ):
                nc_file = pd.Series(
                    f"base_s_{data_config['scenario']['clusters'][0]}"
                    f"_l{data_config['scenario']['ll'][0]}"
                    f"_{data_config['scenario']['opts'][0]}"
                    f"_{data_config['scenario']['sector_opts'][0]}"
                    f"_{data_config['scenario']['planning_horizons'][i]}.nc"
                )
                networks = networks._append(nc_file)

            scn_path = pd.DataFrame(
                index=["2025", "2030", "2035", "2045"],
                columns=["prenetwork", "functions"],
            )

            for year in scn_path.index:
                scn_path.at[year, "prenetwork"] = networks[
                    networks.str.contains(year)
                ].values

            for year in ["2025", "2030", "2035"]:
                scn_path.loc[year, "functions"] = [
                    # drop_urban_decentral_heat,
                    update_electrical_timeseries_germany,
                    geothermal_district_heating,
                    h2_overground_stores,
                    drop_new_gas_pipelines,
                    offwind_potential_D,
                ]

            scn_path.loc["2045", "functions"] = [
                drop_biomass,
                # drop_urban_decentral_heat,
                update_electrical_timeseries_germany,
                geothermal_district_heating,
                h2_overground_stores,
                drop_new_gas_pipelines,
                drop_fossil_gas,
                offwind_potential_D,
                additional_grid_expansion_2045,
                # drop_conventional_power_plants,
                # rual_heat_technologies,  # to be defined
            ]

            network_path = (
                Path(".")
                / "run-pypsa-eur"
                / "pypsa-eur"
                / "results"
                / data_config["run"]["name"]
                / "prenetworks"
            )

            for scn in scn_path.index:
                path = network_path / scn_path.at[scn, "prenetwork"]
                network = pypsa.Network(path)
                network.year = int(scn)
                for manipulator in scn_path.at[scn, "functions"]:
                    network = manipulator(network)
                network.export_to_netcdf(path)

        elif (data_config["foresight"] == "overnight") & (
            int(data_config["scenario"]["planning_horizons"][0]) > 2040
        ):

            print("Adjusting overnight long-term scenario...")

            network_path = (
                Path(".")
                / "run-pypsa-eur"
                / "pypsa-eur"
                / "results"
                / data_config["run"]["name"]
                / "prenetworks"
                / f"elec_s_{data_config['scenario']['clusters'][0]}"
                f"_l{data_config['scenario']['ll'][0]}"
                f"_{data_config['scenario']['opts'][0]}"
                f"_{data_config['scenario']['sector_opts'][0]}"
                f"_{data_config['scenario']['planning_horizons'][0]}.nc"
            )

            network = pypsa.Network(network_path)

            network = drop_biomass(network)
            network = drop_urban_decentral_heat(network)
            network = district_heating_shares(network)
            network = update_heat_timeseries_germany(network)
            network = update_electrical_timeseries_germany(network)
            network = geothermal_district_heating(network)
            network = h2_overground_stores(network)
            network = drop_new_gas_pipelines(network)
            network = drop_fossil_gas(network)
            network = rual_heat_technologies(network)

            network.export_to_netcdf(network_path)

        else:
            print(
                f"""Adjustments on prenetworks are not implemented for
                foresight option {data_config['foresight']} and
                year {data_config['scenario']['planning_horizons'][0]}.
                Please check the pypsaeur.execute function.
                """
            )
    else:
        print("Pypsa-eur is not executed due to the settings of egon-data")