Passed
Pull Request — dev (#931)
by
unknown
01:31
created

overwrite_max_gas_generation_overtheyear()   A

Complexity

Conditions 2

Size

Total Lines 34
Code Lines 14

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 14
dl 0
loc 34
rs 9.7
c 0
b 0
f 0
cc 2
nop 0
1
"""The central module containing all code dealing with importing data from
2
the pypsa-eur-sec scenario parameter creation
3
"""
4
5
from pathlib import Path
6
from urllib.request import urlretrieve
7
import json
8
import os
9
import tarfile
10
11
from shapely.geometry import LineString
12
import geopandas as gpd
13
import importlib_resources as resources
14
import numpy as np
15
import pandas as pd
16
import pypsa
17
import yaml
18
19
from egon.data import __path__, config, db, logger
20
from egon.data.datasets import Dataset
21
from egon.data.datasets.scenario_parameters import get_sector_parameters
22
import egon.data.config
23
import egon.data.subprocess as subproc
24
25
26
def run_pypsa_eur_sec():
    """Set up and execute the PyPSA-Eur-Sec snakemake workflow.

    Clones pypsa-eur (v0.4.0), technology-data (v0.3.0) and the
    openego fork of pypsa-eur-sec into ``run-pypsa-eur-sec``,
    downloads the required zenodo data bundles, injects a custom
    Snakefile, adds gurobi to the pypsa-eur conda environment and
    finally triggers the snakemake target ``Main``.

    Returns
    -------
    None

    """
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur-sec"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"
    pypsa_eur_repos_data = pypsa_eur_repos / "data"
    technology_data_repos = filepath / "technology-data"
    pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
    pypsa_eur_sec_repos_data = pypsa_eur_sec_repos / "data"

    if not pypsa_eur_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.4.0",
                "https://github.com/PyPSA/pypsa-eur.git",
                pypsa_eur_repos,
            ]
        )

        # Replace the upstream Snakefile with the egon-data one
        file_to_copy = os.path.join(
            __path__[0], "datasets", "pypsaeursec", "pypsaeur", "Snakefile"
        )

        subproc.run(["cp", file_to_copy, pypsa_eur_repos])

        # Read YAML file
        path_to_env = pypsa_eur_repos / "envs" / "environment.yaml"
        with open(path_to_env, "r") as stream:
            env = yaml.safe_load(stream)

        # The solver used by the workflow has to be available in the env
        env["dependencies"].append("gurobi")

        # Write YAML file
        with open(path_to_env, "w", encoding="utf8") as outfile:
            yaml.dump(
                env, outfile, default_flow_style=False, allow_unicode=True
            )

        datafile = "pypsa-eur-data-bundle.tar.xz"
        datapath = pypsa_eur_repos / datafile
        if not datapath.exists():
            urlretrieve(
                f"https://zenodo.org/record/3517935/files/{datafile}", datapath
            )
            # BUG FIX: use a context manager so the archive handle is
            # closed even if extraction fails (original leaked it)
            with tarfile.open(datapath) as tar:
                tar.extractall(pypsa_eur_repos_data)

    if not technology_data_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.3.0",
                "https://github.com/PyPSA/technology-data.git",
                technology_data_repos,
            ]
        )

    if not pypsa_eur_sec_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "https://github.com/openego/pypsa-eur-sec.git",
                pypsa_eur_sec_repos,
            ]
        )

    datafile = "pypsa-eur-sec-data-bundle.tar.gz"
    datapath = pypsa_eur_sec_repos_data / datafile
    if not datapath.exists():
        urlretrieve(
            f"https://zenodo.org/record/5824485/files/{datafile}", datapath
        )
        # BUG FIX: context manager instead of a leaked file handle
        with tarfile.open(datapath) as tar:
            tar.extractall(pypsa_eur_sec_repos_data)

    with open(filepath / "Snakefile", "w") as snakefile:
        snakefile.write(
            resources.read_text("egon.data.datasets.pypsaeursec", "Snakefile")
        )

    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            filepath,
            "--snakefile",
            filepath / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "Main",
        ]
    )
132
133
134
def read_network():
    """Load the PyPSA-Eur-Sec post-network used as eGon100RE input.

    Either resolves the result file of an executed pypsa-eur-sec run
    (currently disabled via ``execute_pypsa_eur_sec``) or falls back to
    the pre-calculated network shipped in the egon data bundle.

    Returns
    -------
    pypsa.Network
        The solved pypsa-eur-sec network.

    """
    # Set execute_pypsa_eur_sec to False until optional task is implemented
    execute_pypsa_eur_sec = False
    cwd = Path(".")

    if execute_pypsa_eur_sec:
        run_dir = cwd / "run-pypsa-eur-sec"
        pes_repo = run_dir / "pypsa-eur-sec"
        # Read YAML file describing the executed scenario
        pes_egonconfig = pes_repo / "config_egon.yaml"
        with open(pes_egonconfig, "r") as stream:
            data_config = yaml.safe_load(stream)

        # The scenario section lists one value per dimension; take the first
        scenario = data_config["scenario"]
        file = (
            "elec_s{simpl}_{clusters}_lv{lv}_{opts}"
            "_{sector_opts}_{planning_horizons}.nc"
        ).format(
            simpl=scenario["simpl"][0],
            clusters=scenario["clusters"][0],
            lv=scenario["lv"][0],
            opts=scenario["opts"][0],
            sector_opts=scenario["sector_opts"][0],
            planning_horizons=scenario["planning_horizons"][0],
        )

        target_file = (
            pes_repo
            / "results"
            / data_config["run"]
            / "postnetworks"
            / file
        )
    else:
        target_file = (
            cwd
            / "data_bundle_egon_data"
            / "pypsa_eur_sec"
            / "2022-07-26-egondata-integration"
            / "postnetworks"
            / "elec_s_37_lv2.0__Co2L0-1H-T-H-B-I-dist1_2050.nc"
        )

    return pypsa.Network(str(target_file))
182
183
184
def clean_database():
    """Remove all components abroad for eGon100RE of the database

    Remove all components abroad and their associated time series of
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM grid.egon_etrago_{comp}_timeseries
            WHERE {comp}_id IN (
                SELECT {comp}_id FROM grid.egon_etrago_{comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM grid.egon_etrago_{comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    comp_2_ports = [
        "line",
        "transformer",
        "link",
    ]

    # branch components reference two buses; delete a component only when
    # both ends are abroad (renamed loop var: ``id`` shadowed the builtin)
    for comp, comp_id in zip(comp_2_ports, ["line_id", "trafo_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM grid.egon_etrago_{comp}_timeseries
            WHERE scn_name = '{scn_name}'
            AND {comp_id} IN (
                SELECT {comp_id} FROM grid.egon_etrago_{comp}
            WHERE "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            );

            DELETE FROM grid.egon_etrago_{comp}
            WHERE scn_name = '{scn_name}'
            AND "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            ;"""
        )

    # BUG FIX: the original passed a plain (non-f) string here, so the
    # literal text '{scn_name}' was sent to the database and the foreign
    # buses were never deleted.
    db.execute_sql(
        f"DELETE FROM grid.egon_etrago_bus "
        f"WHERE scn_name = '{scn_name}' "
        f"AND country <> 'DE'"
    )
266
267
268
def neighbor_reduction():
269
270
    network = read_network()
271
272
    network.links.drop("pipe_retrofit", axis="columns", inplace=True)
273
274
    wanted_countries = [
275
        "DE",
276
        "AT",
277
        "CH",
278
        "CZ",
279
        "PL",
280
        "SE",
281
        "NO",
282
        "DK",
283
        "GB",
284
        "NL",
285
        "BE",
286
        "FR",
287
        "LU",
288
    ]
289
    foreign_buses = network.buses[
290
        ~network.buses.index.str.contains("|".join(wanted_countries))
291
    ]
292
    network.buses = network.buses.drop(
293
        network.buses.loc[foreign_buses.index].index
294
    )
295
296
    # drop foreign lines and links from the 2nd row
297
298
    network.lines = network.lines.drop(
299
        network.lines[
300
            (network.lines["bus0"].isin(network.buses.index) == False)
301
            & (network.lines["bus1"].isin(network.buses.index) == False)
302
        ].index
303
    )
304
305
    # select all lines which have at bus1 the bus which is kept
306
    lines_cb_1 = network.lines[
307
        (network.lines["bus0"].isin(network.buses.index) == False)
308
    ]
309
310
    # create a load at bus1 with the line's hourly loading
311
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):
312
        network.add(
313
            "Load",
314
            "slack_fix " + i + " " + k,
315
            bus=i,
316
            p_set=network.lines_t.p1[k],
317
        )
318
        network.loads.carrier.loc[
319
            "slack_fix " + i + " " + k
320
        ] = lines_cb_1.carrier[k]
321
322
    # select all lines which have at bus0 the bus which is kept
323
    lines_cb_0 = network.lines[
324
        (network.lines["bus1"].isin(network.buses.index) == False)
325
    ]
326
327
    # create a load at bus0 with the line's hourly loading
328
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
329
        network.add(
330
            "Load",
331
            "slack_fix " + i + " " + k,
332
            bus=i,
333
            p_set=network.lines_t.p0[k],
334
        )
335
        network.loads.carrier.loc[
336
            "slack_fix " + i + " " + k
337
        ] = lines_cb_0.carrier[k]
338
339
    # do the same for links
340
341
    network.links = network.links.drop(
342
        network.links[
343
            (network.links["bus0"].isin(network.buses.index) == False)
344
            & (network.links["bus1"].isin(network.buses.index) == False)
345
        ].index
346
    )
347
348
    # select all links which have at bus1 the bus which is kept
349
    links_cb_1 = network.links[
350
        (network.links["bus0"].isin(network.buses.index) == False)
351
    ]
352
353
    # create a load at bus1 with the link's hourly loading
354
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
355
        network.add(
356
            "Load",
357
            "slack_fix_links " + i + " " + k,
358
            bus=i,
359
            p_set=network.links_t.p1[k],
360
        )
361
        network.loads.carrier.loc[
362
            "slack_fix_links " + i + " " + k
363
        ] = links_cb_1.carrier[k]
364
365
    # select all links which have at bus0 the bus which is kept
366
    links_cb_0 = network.links[
367
        (network.links["bus1"].isin(network.buses.index) == False)
368
    ]
369
370
    # create a load at bus0 with the link's hourly loading
371
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
372
        network.add(
373
            "Load",
374
            "slack_fix_links " + i + " " + k,
375
            bus=i,
376
            p_set=network.links_t.p0[k],
377
        )
378
        network.loads.carrier.loc[
379
            "slack_fix_links " + i + " " + k
380
        ] = links_cb_0.carrier[k]
381
382
    # drop remaining foreign components
383
384
    network.lines = network.lines.drop(
385
        network.lines[
386
            (network.lines["bus0"].isin(network.buses.index) == False)
387
            | (network.lines["bus1"].isin(network.buses.index) == False)
388
        ].index
389
    )
390
391
    network.links = network.links.drop(
392
        network.links[
393
            (network.links["bus0"].isin(network.buses.index) == False)
394
            | (network.links["bus1"].isin(network.buses.index) == False)
395
        ].index
396
    )
397
398
    network.transformers = network.transformers.drop(
399
        network.transformers[
400
            (network.transformers["bus0"].isin(network.buses.index) == False)
401
            | (network.transformers["bus1"].isin(network.buses.index) == False)
402
        ].index
403
    )
404
    network.generators = network.generators.drop(
405
        network.generators[
406
            (network.generators["bus"].isin(network.buses.index) == False)
407
        ].index
408
    )
409
410
    network.loads = network.loads.drop(
411
        network.loads[
412
            (network.loads["bus"].isin(network.buses.index) == False)
413
        ].index
414
    )
415
416
    network.storage_units = network.storage_units.drop(
417
        network.storage_units[
418
            (network.storage_units["bus"].isin(network.buses.index) == False)
419
        ].index
420
    )
421
422
    components = [
423
        "loads",
424
        "generators",
425
        "lines",
426
        "buses",
427
        "transformers",
428
        "links",
429
    ]
430
    for g in components:  # loads_t
431
        h = g + "_t"
432
        nw = getattr(network, h)  # network.loads_t
433
        for i in nw.keys():  # network.loads_t.p
434
            cols = [
435
                j
436
                for j in getattr(nw, i).columns
437
                if j not in getattr(network, g).index
438
            ]
439
            for k in cols:
440
                del getattr(nw, i)[k]
441
442
    # writing components of neighboring countries to etrago tables
443
444
    # Set country tag for all buses
445
    network.buses.country = network.buses.index.str[:2]
446
    neighbors = network.buses[network.buses.country != "DE"]
447
448
    neighbors["new_index"] = (
449
        db.next_etrago_id("bus") + neighbors.reset_index().index
450
    )
451
452
    # lines, the foreign crossborder lines
453
    # (without crossborder lines to Germany!)
454
455
    neighbor_lines = network.lines[
456
        network.lines.bus0.isin(neighbors.index)
457
        & network.lines.bus1.isin(neighbors.index)
458
    ]
459
    if not network.lines_t["s_max_pu"].empty:
460
        neighbor_lines_t = network.lines_t["s_max_pu"][neighbor_lines.index]
461
462
    neighbor_lines.reset_index(inplace=True)
463
    neighbor_lines.bus0 = (
464
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
465
    )
466
    neighbor_lines.bus1 = (
467
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
468
    )
469
    neighbor_lines.index += db.next_etrago_id("line")
470
471
    if not network.lines_t["s_max_pu"].empty:
472
        for i in neighbor_lines_t.columns:
0 ignored issues
show
introduced by
The variable neighbor_lines_t does not seem to be defined in case BooleanNotNode on line 459 is False. Are you sure this can never be the case?
Loading history...
473
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
474
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)
475
476
    # links
477
    neighbor_links = network.links[
478
        network.links.bus0.isin(neighbors.index)
479
        & network.links.bus1.isin(neighbors.index)
480
    ]
481
482
    neighbor_links.reset_index(inplace=True)
483
    neighbor_links.bus0 = (
484
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
485
    )
486
    neighbor_links.bus1 = (
487
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
488
    )
489
    neighbor_links.index += db.next_etrago_id("link")
490
491
    # generators
492
    neighbor_gens = network.generators[
493
        network.generators.bus.isin(neighbors.index)
494
    ]
495
    neighbor_gens_t = network.generators_t["p_max_pu"][
496
        neighbor_gens[
497
            neighbor_gens.index.isin(network.generators_t["p_max_pu"].columns)
498
        ].index
499
    ]
500
501
    neighbor_gens.reset_index(inplace=True)
502
    neighbor_gens.bus = (
503
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
504
    )
505
    neighbor_gens.index += db.next_etrago_id("generator")
506
507
    for i in neighbor_gens_t.columns:
508
        new_index = neighbor_gens[neighbor_gens["name"] == i].index
509
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)
510
511
    # loads
512
513
    neighbor_loads = network.loads[network.loads.bus.isin(neighbors.index)]
514
    neighbor_loads_t_index = neighbor_loads.index[
515
        neighbor_loads.index.isin(network.loads_t.p_set.columns)
516
    ]
517
    neighbor_loads_t = network.loads_t["p_set"][neighbor_loads_t_index]
518
519
    neighbor_loads.reset_index(inplace=True)
520
    neighbor_loads.bus = (
521
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
522
    )
523
    neighbor_loads.index += db.next_etrago_id("load")
524
525
    for i in neighbor_loads_t.columns:
526
        new_index = neighbor_loads[neighbor_loads["index"] == i].index
527
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)
528
529
    # stores
530
    neighbor_stores = network.stores[network.stores.bus.isin(neighbors.index)]
531
    neighbor_stores_t_index = neighbor_stores.index[
532
        neighbor_stores.index.isin(network.stores_t.e_min_pu.columns)
533
    ]
534
    neighbor_stores_t = network.stores_t["e_min_pu"][neighbor_stores_t_index]
535
536
    neighbor_stores.reset_index(inplace=True)
537
    neighbor_stores.bus = (
538
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
539
    )
540
    neighbor_stores.index += db.next_etrago_id("store")
541
542
    for i in neighbor_stores_t.columns:
543
        new_index = neighbor_stores[neighbor_stores["name"] == i].index
544
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)
545
546
    # storage_units
547
    neighbor_storage = network.storage_units[
548
        network.storage_units.bus.isin(neighbors.index)
549
    ]
550
    neighbor_storage_t_index = neighbor_storage.index[
551
        neighbor_storage.index.isin(network.storage_units_t.inflow.columns)
552
    ]
553
    neighbor_storage_t = network.storage_units_t["inflow"][
554
        neighbor_storage_t_index
555
    ]
556
557
    neighbor_storage.reset_index(inplace=True)
558
    neighbor_storage.bus = (
559
        neighbors.loc[neighbor_storage.bus, "new_index"]
560
        .reset_index()
561
        .new_index
562
    )
563
    neighbor_storage.index += db.next_etrago_id("storage")
564
565
    for i in neighbor_storage_t.columns:
566
        new_index = neighbor_storage[neighbor_storage["name"] == i].index
567
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)
568
569
    # Connect to local database
570
    engine = db.engine()
571
572
    neighbors["scn_name"] = "eGon100RE"
573
    neighbors.index = neighbors["new_index"]
574
575
    # Correct geometry for non AC buses
576
    carriers = set(neighbors.carrier.to_list())
577
    carriers.remove("AC")
578
    non_AC_neighbors = pd.DataFrame()
579
    for c in carriers:
580
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
581
            "location", drop=False
582
        )
583
        for i in ["x", "y"]:
584
            c_neighbors = c_neighbors.drop(i, axis=1)
585
        coordinates = neighbors[neighbors.carrier == "AC"][
586
            ["location", "x", "y"]
587
        ].set_index("location")
588
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
589
            "new_index", drop=False
590
        )
591
        non_AC_neighbors = non_AC_neighbors.append(c_neighbors)
592
    neighbors = neighbors[neighbors.carrier == "AC"].append(non_AC_neighbors)
593
594
    for i in ["new_index", "control", "generator", "location", "sub_network"]:
595
        neighbors = neighbors.drop(i, axis=1)
596
597
    # Add geometry column
598
    neighbors = (
599
        gpd.GeoDataFrame(
600
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
601
        )
602
        .rename_geometry("geom")
603
        .set_crs(4326)
604
    )
605
606
    # Unify carrier names
607
    neighbors.carrier = neighbors.carrier.str.replace(" ", "_")
608
    neighbors.carrier.replace(
609
        {
610
            "gas": "CH4",
611
            "gas_for_industry": "CH4_for_industry",
612
        },
613
        inplace=True,
614
    )
615
616
    neighbors.to_postgis(
617
        "egon_etrago_bus",
618
        engine,
619
        schema="grid",
620
        if_exists="append",
621
        index=True,
622
        index_label="bus_id",
623
    )
624
625
    # prepare and write neighboring crossborder lines to etrago tables
626
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
627
        neighbor_lines["scn_name"] = scn
628
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
629
            int
630
        )
631
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]
632
633
        for i in [
634
            "name",
635
            "x_pu_eff",
636
            "r_pu_eff",
637
            "sub_network",
638
            "x_pu",
639
            "r_pu",
640
            "g_pu",
641
            "b_pu",
642
            "s_nom_opt",
643
        ]:
644
            neighbor_lines = neighbor_lines.drop(i, axis=1)
645
646
        # Define geometry and add to lines dataframe as 'topo'
647
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
648
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
649
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
650
        gdf["geometry"] = gdf.apply(
651
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
652
        )
653
654
        neighbor_lines = (
655
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
656
            .rename_geometry("topo")
657
            .set_crs(4326)
658
        )
659
660
        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
661
            "lifetime"
662
        ]["ac_ehv_overhead_line"]
663
664
        neighbor_lines.to_postgis(
665
            "egon_etrago_line",
666
            engine,
667
            schema="grid",
668
            if_exists="append",
669
            index=True,
670
            index_label="line_id",
671
        )
672
673
    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE")
674
    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon2035")
675
676
    def links_to_etrago(neighbor_links, scn="eGon100RE", extendable=True):
677
        """Prepare and write neighboring crossborder links to eTraGo table
678
679
        This function prepare the neighboring crossborder links
680
        generated the PyPSA-eur-sec (p-e-s) run by:
681
          * Delete the useless columns
682
          * If extendable is false only (non default case):
683
          * Replace p_nom = 0 with the p_nom_opt values (arising
684
              from the p-e-s optimisation)
685
              * Setting p_nom_extendable to false
686
          * Add geometry to the links: 'geom' and 'topo' columns
687
          * Change the name of the carriers to be consistent with
688
            eGon-data
689
690
        The function insert then the link to the eTraGo table and has
691
        no return.
692
693
        Parameters
694
        ----------
695
        neighbor_links : pandas.DataFrame
696
            Dataframe containing the neighboring crossborder links
697
        scn_name : str
698
            Name of the scenario
699
        extendable : bool
700
            Boolean expressing if the links should be extendable or not
701
702
        Returns
703
        -------
704
        None
705
706
        """
707
        neighbor_links["scn_name"] = scn
708
709
        if extendable is True:
710
            neighbor_links = neighbor_links.drop(
711
                columns=[
712
                    "name",
713
                    "geometry",
714
                    "tags",
715
                    "under_construction",
716
                    "underground",
717
                    "underwater_fraction",
718
                    "bus2",
719
                    "bus3",
720
                    "bus4",
721
                    "efficiency2",
722
                    "efficiency3",
723
                    "efficiency4",
724
                    "lifetime",
725
                    "p_nom_opt",
726
                    "pipe_retrofit",
727
                ],
728
                errors="ignore",
729
            )
730
731
        elif extendable is False:
732
            neighbor_links = neighbor_links.drop(
733
                columns=[
734
                    "name",
735
                    "geometry",
736
                    "tags",
737
                    "under_construction",
738
                    "underground",
739
                    "underwater_fraction",
740
                    "bus2",
741
                    "bus3",
742
                    "bus4",
743
                    "efficiency2",
744
                    "efficiency3",
745
                    "efficiency4",
746
                    "lifetime",
747
                    "p_nom",
748
                    "p_nom_extendable",
749
                    "pipe_retrofit",
750
                ],
751
                errors="ignore",
752
            )
753
            neighbor_links = neighbor_links.rename(
754
                columns={"p_nom_opt": "p_nom"}
755
            )
756
            neighbor_links["p_nom_extendable"] = False
757
758
        # Define geometry and add to lines dataframe as 'topo'
759
        gdf = gpd.GeoDataFrame(index=neighbor_links.index)
760
        gdf["geom_bus0"] = neighbors.geom[neighbor_links.bus0].values
761
        gdf["geom_bus1"] = neighbors.geom[neighbor_links.bus1].values
762
        gdf["geometry"] = gdf.apply(
763
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
764
        )
765
766
        neighbor_links = (
767
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
768
            .rename_geometry("topo")
769
            .set_crs(4326)
770
        )
771
772
        # Unify carrier names
773
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")
774
775
        neighbor_links.carrier.replace(
776
            {
777
                "H2_Electrolysis": "power_to_H2",
778
                "H2_Fuel_Cell": "H2_to_power",
779
                "H2_pipeline_retrofitted": "H2_retrofit",
780
                "SMR": "CH4_to_H2",
781
                "Sabatier": "H2_to_CH4",
782
                "gas_for_industry": "CH4_for_industry",
783
                "gas_pipeline": "CH4",
784
            },
785
            inplace=True,
786
        )
787
788
        neighbor_links.to_postgis(
789
            "egon_etrago_link",
790
            engine,
791
            schema="grid",
792
            if_exists="append",
793
            index=True,
794
            index_label="link_id",
795
        )
796
797
    non_extendable_links_carriers = [
798
        "H2 pipeline retrofitted",
799
        "gas pipeline",
800
        "biogas to gas",
801
    ]
802
803
    # delete unwanted carriers for eTraGo
804
    excluded_carriers = ["gas for industry CC", "SMR CC"]
805
    neighbor_links = neighbor_links[
806
        ~neighbor_links.carrier.isin(excluded_carriers)
807
    ]
808
809
    links_to_etrago(
810
        neighbor_links[
811
            ~neighbor_links.carrier.isin(non_extendable_links_carriers)
812
        ],
813
        "eGon100RE",
814
    )
815
    links_to_etrago(
816
        neighbor_links[
817
            neighbor_links.carrier.isin(non_extendable_links_carriers)
818
        ],
819
        "eGon100RE",
820
        extendable=False,
821
    )
822
823
    links_to_etrago(neighbor_links[neighbor_links.carrier == "DC"], "eGon2035")
824
825
    # prepare neighboring generators for etrago tables
826
    neighbor_gens["scn_name"] = "eGon100RE"
827
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
828
    neighbor_gens["p_nom_extendable"] = False
829
830
    # Unify carrier names
831
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")
832
833
    neighbor_gens.carrier.replace(
834
        {
835
            "onwind": "wind_onshore",
836
            "ror": "run_of_river",
837
            "offwind-ac": "wind_offshore",
838
            "offwind-dc": "wind_offshore",
839
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
840
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
841
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
842
        },
843
        inplace=True,
844
    )
845
846
    for i in ["name", "weight", "lifetime", "p_set", "q_set", "p_nom_opt"]:
847
        neighbor_gens = neighbor_gens.drop(i, axis=1)
848
849
    neighbor_gens.to_sql(
850
        "egon_etrago_generator",
851
        engine,
852
        schema="grid",
853
        if_exists="append",
854
        index=True,
855
        index_label="generator_id",
856
    )
857
858
    # prepare neighboring loads for etrago tables
859
    neighbor_loads["scn_name"] = "eGon100RE"
860
861
    # Unify carrier names
862
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")
863
864
    neighbor_loads.carrier.replace(
865
        {
866
            "electricity": "AC",
867
            "DC": "AC",
868
            "industry_electricity": "AC",
869
            "H2_pipeline": "H2_system_boundary",
870
            "gas_for_industry": "CH4_for_industry",
871
        },
872
        inplace=True,
873
    )
874
875
    for i in ["index", "p_set", "q_set"]:
876
        neighbor_loads = neighbor_loads.drop(i, axis=1)
877
878
    neighbor_loads.to_sql(
879
        "egon_etrago_load",
880
        engine,
881
        schema="grid",
882
        if_exists="append",
883
        index=True,
884
        index_label="load_id",
885
    )
886
887
    # prepare neighboring stores for etrago tables
888
    neighbor_stores["scn_name"] = "eGon100RE"
889
890
    # Unify carrier names
891
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")
892
893
    neighbor_stores.carrier.replace(
894
        {
895
            "Li_ion": "battery",
896
            "gas": "CH4",
897
        },
898
        inplace=True,
899
    )
900
    neighbor_stores.loc[
901
        (
902
            (neighbor_stores.e_nom_max <= 1e9)
903
            & (neighbor_stores.carrier == "H2")
904
        ),
905
        "carrier",
906
    ] = "H2_underground"
907
    neighbor_stores.loc[
908
        (
909
            (neighbor_stores.e_nom_max > 1e9)
910
            & (neighbor_stores.carrier == "H2")
911
        ),
912
        "carrier",
913
    ] = "H2_overground"
914
915
    for i in ["name", "p_set", "q_set", "e_nom_opt", "lifetime"]:
916
        neighbor_stores = neighbor_stores.drop(i, axis=1)
917
918
    neighbor_stores.to_sql(
919
        "egon_etrago_store",
920
        engine,
921
        schema="grid",
922
        if_exists="append",
923
        index=True,
924
        index_label="store_id",
925
    )
926
927
    # prepare neighboring storage_units for etrago tables
928
    neighbor_storage["scn_name"] = "eGon100RE"
929
930
    # Unify carrier names
931
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")
932
933
    neighbor_storage.carrier.replace(
934
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
935
    )
936
937
    for i in ["name", "p_nom_opt"]:
938
        neighbor_storage = neighbor_storage.drop(i, axis=1)
939
940
    neighbor_storage.to_sql(
941
        "egon_etrago_storage",
942
        engine,
943
        schema="grid",
944
        if_exists="append",
945
        index=True,
946
        index_label="storage_id",
947
    )
948
949
    # writing neighboring loads_t p_sets to etrago tables
950
951
    neighbor_loads_t_etrago = pd.DataFrame(
952
        columns=["scn_name", "temp_id", "p_set"],
953
        index=neighbor_loads_t.columns,
954
    )
955
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
956
    neighbor_loads_t_etrago["temp_id"] = 1
957
    for i in neighbor_loads_t.columns:
958
        neighbor_loads_t_etrago["p_set"][i] = neighbor_loads_t[
959
            i
960
        ].values.tolist()
961
962
    neighbor_loads_t_etrago.to_sql(
963
        "egon_etrago_load_timeseries",
964
        engine,
965
        schema="grid",
966
        if_exists="append",
967
        index=True,
968
        index_label="load_id",
969
    )
970
971
    # writing neighboring generator_t p_max_pu to etrago tables
972
    neighbor_gens_t_etrago = pd.DataFrame(
973
        columns=["scn_name", "temp_id", "p_max_pu"],
974
        index=neighbor_gens_t.columns,
975
    )
976
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
977
    neighbor_gens_t_etrago["temp_id"] = 1
978
    for i in neighbor_gens_t.columns:
979
        neighbor_gens_t_etrago["p_max_pu"][i] = neighbor_gens_t[
980
            i
981
        ].values.tolist()
982
983
    neighbor_gens_t_etrago.to_sql(
984
        "egon_etrago_generator_timeseries",
985
        engine,
986
        schema="grid",
987
        if_exists="append",
988
        index=True,
989
        index_label="generator_id",
990
    )
991
992
    # writing neighboring stores_t e_min_pu to etrago tables
993
    neighbor_stores_t_etrago = pd.DataFrame(
994
        columns=["scn_name", "temp_id", "e_min_pu"],
995
        index=neighbor_stores_t.columns,
996
    )
997
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
998
    neighbor_stores_t_etrago["temp_id"] = 1
999
    for i in neighbor_stores_t.columns:
1000
        neighbor_stores_t_etrago["e_min_pu"][i] = neighbor_stores_t[
1001
            i
1002
        ].values.tolist()
1003
1004
    neighbor_stores_t_etrago.to_sql(
1005
        "egon_etrago_store_timeseries",
1006
        engine,
1007
        schema="grid",
1008
        if_exists="append",
1009
        index=True,
1010
        index_label="store_id",
1011
    )
1012
1013
    # writing neighboring storage_units inflow to etrago tables
1014
    neighbor_storage_t_etrago = pd.DataFrame(
1015
        columns=["scn_name", "temp_id", "inflow"],
1016
        index=neighbor_storage_t.columns,
1017
    )
1018
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
1019
    neighbor_storage_t_etrago["temp_id"] = 1
1020
    for i in neighbor_storage_t.columns:
1021
        neighbor_storage_t_etrago["inflow"][i] = neighbor_storage_t[
1022
            i
1023
        ].values.tolist()
1024
1025
    neighbor_storage_t_etrago.to_sql(
1026
        "egon_etrago_storage_timeseries",
1027
        engine,
1028
        schema="grid",
1029
        if_exists="append",
1030
        index=True,
1031
        index_label="storage_id",
1032
    )
1033
1034
    # writing neighboring lines_t s_max_pu to etrago tables
1035
    if not network.lines_t["s_max_pu"].empty:
1036
        neighbor_lines_t_etrago = pd.DataFrame(
1037
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
1038
        )
1039
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"
1040
1041
        for i in neighbor_lines_t.columns:
1042
            neighbor_lines_t_etrago["s_max_pu"][i] = neighbor_lines_t[
1043
                i
1044
            ].values.tolist()
1045
1046
        neighbor_lines_t_etrago.to_sql(
1047
            "egon_etrago_line_timeseries",
1048
            engine,
1049
            schema="grid",
1050
            if_exists="append",
1051
            index=True,
1052
            index_label="line_id",
1053
        )
1054
1055
1056
def overwrite_H2_pipeline_share():
    """Overwrite retrofitted_CH4pipeline-to-H2pipeline_share value

    Derives the mean share of retrofitted CH4 pipeline capacity from the
    solved pypsa-eur-sec network and writes it into the scenario
    parameter table for the eGon100RE scenario.  This function writes to
    the database and returns nothing.
    """
    scenario = "eGon100RE"

    # Target table taken from the dataset configuration
    target = egon.data.config.datasets()["pypsa-eur-sec"]["target"]

    network = read_network()

    links = network.links
    retrofitted = links[links["carrier"] == "H2 pipeline retrofitted"]
    ch4_pipes = links[links["carrier"] == "gas pipeline"]

    # Mean ratio of optimised H2 capacity to original CH4 capacity,
    # pairing the two link selections row by row.
    ratios = [
        h2_cap / ch4_cap
        for h2_cap, ch4_cap in zip(
            retrofitted.p_nom_opt.to_list(), ch4_pipes.p_nom.to_list()
        )
    ]
    H2_pipes_share = np.mean(ratios)

    logger.info(
        "retrofitted_CH4pipeline-to-H2pipeline_share = " + str(H2_pipes_share)
    )

    # Fetch the current parameter row for the scenario
    parameters = db.select_dataframe(
        f"""
        SELECT *
        FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        WHERE name = '{scenario}'
        """
    )

    gas_param = parameters.loc[0, "gas_parameters"]
    gas_param["retrofitted_CH4pipeline-to-H2pipeline_share"] = H2_pipes_share
    serialized = json.dumps(gas_param)

    # Push the updated parameter set back into the database
    db.execute_sql(
        f"""
    UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
    SET gas_parameters = '{serialized}'
    WHERE name = '{scenario}';
    """
    )
1104
1105
1106
def overwrite_max_gas_generation_overtheyear():
    """Overwrite max_gas_generation_overtheyear in scenario parameter table

    Takes the yearly biogas generation potential of the German node from
    the solved pypsa-eur-sec network and stores it in the eGon100RE
    scenario parameters.  Does nothing when the pypsa-eur-sec execution
    is disabled.  This function writes to the database and returns
    nothing.
    """
    # Nothing to overwrite unless the p-e-s run was actually executed.
    if not execute_pypsa_eur_sec:
        return

    scenario = "eGon100RE"

    # Target table taken from the dataset configuration
    target = config.datasets()["gas_prod"]["target"]

    network = read_network()

    # Yearly biogas potential of the German node in the p-e-s network
    biogas_stores = network.stores[network.stores["carrier"] == "biogas"]
    max_value = biogas_stores.loc["DE0 0 biogas", "e_initial"]

    # Fetch the current parameter row for the scenario
    parameters = db.select_dataframe(
        f"""
            SELECT *
            FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
            WHERE name = '{scenario}'
            """
    )

    gas_param = parameters.loc[0, "gas_parameters"]
    gas_param["max_gas_generation_overtheyear"] = {"biogas": max_value}
    serialized = json.dumps(gas_param)

    # Push the updated parameter set back into the database
    db.execute_sql(
        f"""
        UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        SET gas_parameters = '{serialized}'
        WHERE name = '{scenario}';
        """
    )
1145
1146
1147
# Skip execution of pypsa-eur-sec by default until optional task is implemented
execute_pypsa_eur_sec = False

# Default pipeline; the full pypsa-eur-sec run wraps it with the optional
# execution and parameter-overwrite steps.
tasks = (
    clean_database,
    neighbor_reduction,
)
if execute_pypsa_eur_sec:
    tasks = (
        run_pypsa_eur_sec,
        clean_database,
        neighbor_reduction,
        overwrite_H2_pipeline_share,
        overwrite_max_gas_generation_overtheyear,
    )
1163
1164
1165
class PypsaEurSec(Dataset):
    """Dataset wiring the pypsa-eur-sec task pipeline into egon.data.

    Registers the module-level ``tasks`` tuple: by default only
    ``clean_database`` and ``neighbor_reduction``; when
    ``execute_pypsa_eur_sec`` is enabled, the full pypsa-eur-sec run and
    the scenario-parameter overwrite steps as well.
    """

    def __init__(self, dependencies):
        # Hand the task tuple assembled at module level to the Dataset
        # machinery together with this dataset's name and version.
        super().__init__(
            name="PypsaEurSec",
            version="0.0.10",
            dependencies=dependencies,
            tasks=tasks,
        )
1173