Passed: Pull Request — dev (#931)
created by unknown, 01:41

overwrite_max_gas_generation_overtheyear()   rating: A

Complexity:  Conditions 2
Size:        Total Lines 34, Code Lines 14
Duplication: Lines 0, Ratio 0 %
Importance:  Changes 0

Metric  Value
eloc    14
dl      0
loc     34
rs      9.7
c       0
b       0
f       0
cc      2
nop     0
"""The central module containing all code dealing with importing data from
the pypsa-eur-sec scenario parameter creation
"""

from pathlib import Path
from urllib.request import urlretrieve
import json
import os
import tarfile

from shapely.geometry import LineString
import geopandas as gpd
import importlib_resources as resources
import numpy as np
import pandas as pd
import pypsa
import yaml

from egon.data import __path__, config, db, logger
from egon.data.datasets import Dataset
from egon.data.datasets.scenario_parameters import get_sector_parameters
import egon.data.config
import egon.data.subprocess as subproc


def run_pypsa_eur_sec():
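    """Clone and run the pypsa-eur-sec workflow

    Clones the pypsa-eur, technology-data and pypsa-eur-sec
    repositories into the run-pypsa-eur-sec folder, downloads the
    required data bundles and runs the snakemake workflow defined in
    the bundled Snakefile.

    Returns
    -------
    None

    """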

    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur-sec"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"
    pypsa_eur_repos_data = pypsa_eur_repos / "data"
    technology_data_repos = filepath / "technology-data"
    pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
    pypsa_eur_sec_repos_data = pypsa_eur_sec_repos / "data"

    if not pypsa_eur_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.4.0",
                "https://github.com/PyPSA/pypsa-eur.git",
                pypsa_eur_repos,
            ]
        )

        # subproc.run(
        #     ["git", "checkout", "4e44822514755cdd0289687556547100fba6218b"],
        #     cwd=pypsa_eur_repos,
        # )

        file_to_copy = os.path.join(
            __path__[0], "datasets", "pypsaeursec", "pypsaeur", "Snakefile"
        )

        subproc.run(["cp", file_to_copy, pypsa_eur_repos])

        # Read YAML file
        path_to_env = pypsa_eur_repos / "envs" / "environment.yaml"
        with open(path_to_env, "r") as stream:
            env = yaml.safe_load(stream)

        env["dependencies"].append("gurobi")

        # Write YAML file
        with open(path_to_env, "w", encoding="utf8") as outfile:
            yaml.dump(
                env, outfile, default_flow_style=False, allow_unicode=True
            )

        datafile = "pypsa-eur-data-bundle.tar.xz"
        datapath = pypsa_eur_repos / datafile
        if not datapath.exists():
            urlretrieve(
                f"https://zenodo.org/record/3517935/files/{datafile}", datapath
            )
            tar = tarfile.open(datapath)
            tar.extractall(pypsa_eur_repos_data)

    if not technology_data_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.3.0",
                "https://github.com/PyPSA/technology-data.git",
                technology_data_repos,
            ]
        )

    if not pypsa_eur_sec_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "https://github.com/openego/pypsa-eur-sec.git",
                pypsa_eur_sec_repos,
            ]
        )

    datafile = "pypsa-eur-sec-data-bundle.tar.gz"
    datapath = pypsa_eur_sec_repos_data / datafile
    if not datapath.exists():
        urlretrieve(
            f"https://zenodo.org/record/5824485/files/{datafile}", datapath
        )
        tar = tarfile.open(datapath)
        tar.extractall(pypsa_eur_sec_repos_data)

    with open(filepath / "Snakefile", "w") as snakefile:
        snakefile.write(
            resources.read_text("egon.data.datasets.pypsaeursec", "Snakefile")
        )

    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            filepath,
            "--snakefile",
            filepath / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "Main",
        ]
    )


def read_network():
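    """Read the network resulting from the pypsa-eur-sec run

    As long as the optional pypsa-eur-sec task is not executed, the
    pre-calculated postnetwork is read from the data bundle instead of
    the local results folder.

    Returns
    -------
    pypsa.Network

    """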

    # Set execute_pypsa_eur_sec to False until optional task is implemented
    execute_pypsa_eur_sec = False
    cwd = Path(".")

    if execute_pypsa_eur_sec:
        filepath = cwd / "run-pypsa-eur-sec"
        pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
        # Read YAML file
        pes_egonconfig = pypsa_eur_sec_repos / "config_egon.yaml"
        with open(pes_egonconfig, "r") as stream:
            data_config = yaml.safe_load(stream)

        simpl = data_config["scenario"]["simpl"][0]
        clusters = data_config["scenario"]["clusters"][0]
        lv = data_config["scenario"]["lv"][0]
        opts = data_config["scenario"]["opts"][0]
        sector_opts = data_config["scenario"]["sector_opts"][0]
        planning_horizons = data_config["scenario"]["planning_horizons"][0]
        file = "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc".format(
            simpl=simpl,
            clusters=clusters,
            opts=opts,
            lv=lv,
            sector_opts=sector_opts,
            planning_horizons=planning_horizons,
        )

        target_file = (
            pypsa_eur_sec_repos
            / "results"
            / data_config["run"]
            / "postnetworks"
            / file
        )

    else:
        target_file = (
            cwd
            / "data_bundle_egon_data"
            / "pypsa_eur_sec"
            / "2022-07-26-egondata-integration"
            / "postnetworks"
            / "elec_s_37_lv2.0__Co2L0-1H-T-H-B-I-dist1_2050.nc"
        )

    return pypsa.Network(str(target_file))


def clean_database():
    """Remove all components abroad for eGon100RE from the database

    Remove all components abroad and their associated time series from
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE {comp + "_id"} IN (
                SELECT {comp + "_id"} FROM {"grid.egon_etrago_" + comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    comp_2_ports = [
        "line",
        "transformer",
        "link",
    ]

    for comp, id in zip(comp_2_ports, ["line_id", "trafo_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE scn_name = '{scn_name}'
            AND {id} IN (
                SELECT {id} FROM {"grid.egon_etrago_" + comp}
            WHERE "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            );

            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE scn_name = '{scn_name}'
            AND "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            ;"""
        )

    db.execute_sql(
        f"""
        DELETE FROM grid.egon_etrago_bus
        WHERE scn_name = '{scn_name}'
        AND country != 'DE'
        """
    )


def neighbor_reduction():
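    """Reduce the pypsa-eur-sec network to Germany and its neighbors

    Drops buses and branches outside the wanted countries, replaces
    cut crossborder flows by fixed 'slack_fix' loads and writes the
    remaining neighboring components and their time series to the
    eTraGo tables.

    Returns
    -------
    None

    """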

    network = read_network()

    network.links.drop("pipe_retrofit", axis="columns", inplace=True)

    wanted_countries = [
        "DE",
        "AT",
        "CH",
        "CZ",
        "PL",
        "SE",
        "NO",
        "DK",
        "GB",
        "NL",
        "BE",
        "FR",
        "LU",
    ]
    foreign_buses = network.buses[
        ~network.buses.index.str.contains("|".join(wanted_countries))
    ]
    network.buses = network.buses.drop(
        network.buses.loc[foreign_buses.index].index
    )

    # drop lines and links whose endpoints are both foreign buses

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            & (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all lines which have at bus1 the bus which is kept
    lines_cb_1 = network.lines[
        (network.lines["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the line's hourly loading
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_1.carrier[k]

    # select all lines which have at bus0 the bus which is kept
    lines_cb_0 = network.lines[
        (network.lines["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the line's hourly loading
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_0.carrier[k]

    # do the same for links

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            & (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all links which have at bus1 the bus which is kept
    links_cb_1 = network.links[
        (network.links["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the link's hourly loading
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_1.carrier[k]

    # select all links which have at bus0 the bus which is kept
    links_cb_0 = network.links[
        (network.links["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the link's hourly loading
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_0.carrier[k]

    # drop remaining foreign components

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            | (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            | (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.transformers = network.transformers.drop(
        network.transformers[
            (network.transformers["bus0"].isin(network.buses.index) == False)
            | (network.transformers["bus1"].isin(network.buses.index) == False)
        ].index
    )
    network.generators = network.generators.drop(
        network.generators[
            (network.generators["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.loads = network.loads.drop(
        network.loads[
            (network.loads["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.storage_units = network.storage_units.drop(
        network.storage_units[
            (network.storage_units["bus"].isin(network.buses.index) == False)
        ].index
    )

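    # remove the time series of all components that were dropped above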
    components = [
        "loads",
        "generators",
        "lines",
        "buses",
        "transformers",
        "links",
    ]
    for g in components:  # loads_t
        h = g + "_t"
        nw = getattr(network, h)  # network.loads_t
        for i in nw.keys():  # network.loads_t.p
            cols = [
                j
                for j in getattr(nw, i).columns
                if j not in getattr(network, g).index
            ]
            for k in cols:
                del getattr(nw, i)[k]

    # writing components of neighboring countries to etrago tables

    # Set country tag for all buses
    network.buses.country = network.buses.index.str[:2]
    neighbors = network.buses[network.buses.country != "DE"]

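    # define new eTraGo bus ids for the neighboring buses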
    neighbors["new_index"] = (
        db.next_etrago_id("bus") + neighbors.reset_index().index
    )

    # lines, the foreign crossborder lines
    # (without crossborder lines to Germany!)

    neighbor_lines = network.lines[
        network.lines.bus0.isin(neighbors.index)
        & network.lines.bus1.isin(neighbors.index)
    ]
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t = network.lines_t["s_max_pu"][neighbor_lines.index]

    neighbor_lines.reset_index(inplace=True)
    neighbor_lines.bus0 = (
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
    )
    neighbor_lines.bus1 = (
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
    )
    neighbor_lines.index += db.next_etrago_id("line")

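    # neighbor_lines_t is only defined if s_max_pu time series exist,
    # hence the same guard is applied again here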
    if not network.lines_t["s_max_pu"].empty:
        for i in neighbor_lines_t.columns:
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)

    # links
    neighbor_links = network.links[
        network.links.bus0.isin(neighbors.index)
        & network.links.bus1.isin(neighbors.index)
    ]

    neighbor_links.reset_index(inplace=True)
    neighbor_links.bus0 = (
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
    )
    neighbor_links.bus1 = (
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
    )
    neighbor_links.index += db.next_etrago_id("link")

    # generators
    neighbor_gens = network.generators[
        network.generators.bus.isin(neighbors.index)
    ]
    neighbor_gens_t = network.generators_t["p_max_pu"][
        neighbor_gens[
            neighbor_gens.index.isin(network.generators_t["p_max_pu"].columns)
        ].index
    ]

    neighbor_gens.reset_index(inplace=True)
    neighbor_gens.bus = (
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
    )
    neighbor_gens.index += db.next_etrago_id("generator")

    for i in neighbor_gens_t.columns:
        new_index = neighbor_gens[neighbor_gens["name"] == i].index
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)

    # loads

    neighbor_loads = network.loads[network.loads.bus.isin(neighbors.index)]
    neighbor_loads_t_index = neighbor_loads.index[
        neighbor_loads.index.isin(network.loads_t.p_set.columns)
    ]
    neighbor_loads_t = network.loads_t["p_set"][neighbor_loads_t_index]

    neighbor_loads.reset_index(inplace=True)
    neighbor_loads.bus = (
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
    )
    neighbor_loads.index += db.next_etrago_id("load")

    for i in neighbor_loads_t.columns:
        new_index = neighbor_loads[neighbor_loads["index"] == i].index
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)

    # stores
    neighbor_stores = network.stores[network.stores.bus.isin(neighbors.index)]
    neighbor_stores_t_index = neighbor_stores.index[
        neighbor_stores.index.isin(network.stores_t.e_min_pu.columns)
    ]
    neighbor_stores_t = network.stores_t["e_min_pu"][neighbor_stores_t_index]

    neighbor_stores.reset_index(inplace=True)
    neighbor_stores.bus = (
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
    )
    neighbor_stores.index += db.next_etrago_id("store")

    for i in neighbor_stores_t.columns:
        new_index = neighbor_stores[neighbor_stores["name"] == i].index
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)

    # storage_units
    neighbor_storage = network.storage_units[
        network.storage_units.bus.isin(neighbors.index)
    ]
    neighbor_storage_t_index = neighbor_storage.index[
        neighbor_storage.index.isin(network.storage_units_t.inflow.columns)
    ]
    neighbor_storage_t = network.storage_units_t["inflow"][
        neighbor_storage_t_index
    ]

    neighbor_storage.reset_index(inplace=True)
    neighbor_storage.bus = (
        neighbors.loc[neighbor_storage.bus, "new_index"]
        .reset_index()
        .new_index
    )
    neighbor_storage.index += db.next_etrago_id("storage")

    for i in neighbor_storage_t.columns:
        new_index = neighbor_storage[neighbor_storage["name"] == i].index
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)

    # Connect to local database
    engine = db.engine()

    neighbors["scn_name"] = "eGon100RE"
    neighbors.index = neighbors["new_index"]

    # Correct geometry for non AC buses
    carriers = set(neighbors.carrier.to_list())
    carriers = [
        e for e in carriers if e not in ("AC", "biogas", "gas for industry")
    ]
    non_AC_neighbors = pd.DataFrame()
    for c in carriers:
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
            "location", drop=False
        )
        for i in ["x", "y"]:
            c_neighbors = c_neighbors.drop(i, axis=1)
        coordinates = neighbors[neighbors.carrier == "AC"][
            ["location", "x", "y"]
        ].set_index("location")
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
            "new_index", drop=False
        )
        non_AC_neighbors = non_AC_neighbors.append(c_neighbors)
    neighbors = neighbors[neighbors.carrier == "AC"].append(non_AC_neighbors)

    for i in ["new_index", "control", "generator", "location", "sub_network"]:
        neighbors = neighbors.drop(i, axis=1)

    # Add geometry column
    neighbors = (
        gpd.GeoDataFrame(
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
        )
        .rename_geometry("geom")
        .set_crs(4326)
    )

    # Unify carrier names
    neighbors.carrier = neighbors.carrier.str.replace(" ", "_")
    neighbors.carrier.replace(
        {
            "gas": "CH4",
        },
        inplace=True,
    )

    neighbors.to_postgis(
        "egon_etrago_bus",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="bus_id",
    )

    # prepare and write neighboring crossborder lines to etrago tables
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
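        """Prepare and write neighboring crossborder lines to eTraGo table

        Parameters
        ----------
        neighbor_lines : pandas.DataFrame
            Dataframe containing the neighboring crossborder lines
        scn : str
            Name of the scenario

        Returns
        -------
        None

        """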
        neighbor_lines["scn_name"] = scn
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
            int
        )
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]

        for i in [
            "name",
            "x_pu_eff",
            "r_pu_eff",
            "sub_network",
            "x_pu",
            "r_pu",
            "g_pu",
            "b_pu",
            "s_nom_opt",
        ]:
            neighbor_lines = neighbor_lines.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_lines = (
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
            "lifetime"
        ]["ac_ehv_overhead_line"]

        neighbor_lines.to_postgis(
            "egon_etrago_line",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )

    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE")
    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon2035")

    def links_to_etrago(neighbor_links, scn="eGon100RE", extendable=True):
        """Prepare and write neighboring crossborder links to eTraGo table

        This function prepares the neighboring crossborder links
        generated by the PyPSA-eur-sec (p-e-s) run by:
          * Deleting the useless columns
          * If extendable is False (non-default case):
              * Replacing p_nom = 0 with the p_nom_opt values (arising
                from the p-e-s optimisation)
              * Setting p_nom_extendable to False
          * Adding geometry to the links: 'geom' and 'topo' columns
          * Changing the carrier names to be consistent with eGon-data

        The function then inserts the links into the eTraGo table and
        has no return value.

        Parameters
        ----------
        neighbor_links : pandas.DataFrame
            Dataframe containing the neighboring crossborder links
        scn : str
            Name of the scenario
        extendable : bool
            Boolean expressing if the links should be extendable or not

        Returns
        -------
        None

        """
        neighbor_links["scn_name"] = scn

        if extendable is True:
            neighbor_links = neighbor_links.drop(
                columns=[
                    "name",
                    "geometry",
                    "tags",
                    "under_construction",
                    "underground",
                    "underwater_fraction",
                    "bus2",
                    "bus3",
                    "bus4",
                    "efficiency2",
                    "efficiency3",
                    "efficiency4",
                    "lifetime",
                    "p_nom_opt",
                    "pipe_retrofit",
                ],
                errors="ignore",
            )

        elif extendable is False:
            neighbor_links = neighbor_links.drop(
                columns=[
                    "name",
                    "geometry",
                    "tags",
                    "under_construction",
                    "underground",
                    "underwater_fraction",
                    "bus2",
                    "bus3",
                    "bus4",
                    "efficiency2",
                    "efficiency3",
                    "efficiency4",
                    "lifetime",
                    "p_nom",
                    "p_nom_extendable",
                    "pipe_retrofit",
                ],
                errors="ignore",
            )
            neighbor_links = neighbor_links.rename(
                columns={"p_nom_opt": "p_nom"}
            )
            neighbor_links["p_nom_extendable"] = False

        # Define geometry and add to links dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_links.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_links.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_links.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_links = (
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        # Unify carrier names
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")

        neighbor_links.carrier.replace(
            {
                "H2_Electrolysis": "power_to_H2",
                "H2_Fuel_Cell": "H2_to_power",
                "H2_pipeline_retrofitted": "H2_retrofit",
                "SMR": "CH4_to_H2",
                "Sabatier": "H2_to_CH4",
                "gas_pipeline": "CH4",
            },
            inplace=True,
        )

        neighbor_links.to_postgis(
            "egon_etrago_link",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="link_id",
        )

    non_extendable_links_carriers = [
        "H2 pipeline retrofitted",
        "gas pipeline",
    ]

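    # bus mappings for the dropped 'gas for industry' and 'biogas to gas'
    # links, used below to reattach the corresponding loads and stores
    # to their CH4 buses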
    map_CH4_for_ind_buses = neighbor_links[
        neighbor_links.carrier == "gas for industry"
    ].set_index("bus1")["bus0"]
    map_biogas_to_gas = neighbor_links[
        neighbor_links.carrier == "biogas to gas"
    ].set_index("bus0")["bus1"]

    # delete unwanted carriers for eTraGo
    excluded_carriers = [
        "gas for industry",
        "gas for industry CC",
        "SMR CC",
        "biogas to gas",
    ]
    neighbor_links = neighbor_links[
        ~neighbor_links.carrier.isin(excluded_carriers)
    ]

    links_to_etrago(
        neighbor_links[
            ~neighbor_links.carrier.isin(non_extendable_links_carriers)
        ],
        "eGon100RE",
    )
    links_to_etrago(
        neighbor_links[
            neighbor_links.carrier.isin(non_extendable_links_carriers)
        ],
        "eGon100RE",
        extendable=False,
    )

    links_to_etrago(neighbor_links[neighbor_links.carrier == "DC"], "eGon2035")

    # prepare neighboring generators for etrago tables
    neighbor_gens["scn_name"] = "eGon100RE"
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
    neighbor_gens["p_nom_extendable"] = False

    # Unify carrier names
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")

    neighbor_gens.carrier.replace(
        {
            "onwind": "wind_onshore",
            "ror": "run_of_river",
            "offwind-ac": "wind_offshore",
            "offwind-dc": "wind_offshore",
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
        },
        inplace=True,
    )

    for i in ["name", "weight", "lifetime", "p_set", "q_set", "p_nom_opt"]:
        neighbor_gens = neighbor_gens.drop(i, axis=1)

    neighbor_gens.to_sql(
        "egon_etrago_generator",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # prepare neighboring loads for etrago tables
    neighbor_loads["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")

    neighbor_loads.carrier.replace(
        {
            "electricity": "AC",
            "DC": "AC",
            "industry_electricity": "AC",
            "H2_pipeline_retrofitted": "H2_system_boundary",
            "gas_pipeline": "CH4_system_boundary",
            "gas_for_industry": "CH4_for_industry",
        },
        inplace=True,
    )

    # Attribute CH4 bus to CH4_for_industry loads
    neighbor_loads["bus"] = neighbor_loads["bus"].replace(
        map_CH4_for_ind_buses
    )

    neighbor_loads = neighbor_loads.drop(
        columns=["index"],
        errors="ignore",
    )

    neighbor_loads.to_sql(
        "egon_etrago_load",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # prepare neighboring stores for etrago tables
    neighbor_stores["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")

    neighbor_stores.carrier.replace(
        {
            "Li_ion": "battery",
            "gas": "CH4",
        },
        inplace=True,
    )
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max <= 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_underground"
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max > 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_overground"

    for i in ["name", "p_set", "q_set", "e_nom_opt", "lifetime"]:
        neighbor_stores = neighbor_stores.drop(i, axis=1)

    # Attribute CH4 bus to biogas stores
    neighbor_stores["bus"] = neighbor_stores["bus"].replace(map_biogas_to_gas)

    neighbor_stores.to_sql(
        "egon_etrago_store",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # prepare neighboring storage_units for etrago tables
    neighbor_storage["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")

    neighbor_storage.carrier.replace(
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
    )

    for i in ["name", "p_nom_opt"]:
        neighbor_storage = neighbor_storage.drop(i, axis=1)

    neighbor_storage.to_sql(
        "egon_etrago_storage",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring loads_t p_sets to etrago tables

    neighbor_loads_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_set"],
        index=neighbor_loads_t.columns,
    )
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
    neighbor_loads_t_etrago["temp_id"] = 1
    for i in neighbor_loads_t.columns:
        neighbor_loads_t_etrago["p_set"][i] = neighbor_loads_t[
            i
        ].values.tolist()

    neighbor_loads_t_etrago.to_sql(
        "egon_etrago_load_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # writing neighboring generator_t p_max_pu to etrago tables
    neighbor_gens_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_max_pu"],
        index=neighbor_gens_t.columns,
    )
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
    neighbor_gens_t_etrago["temp_id"] = 1
    for i in neighbor_gens_t.columns:
        neighbor_gens_t_etrago["p_max_pu"][i] = neighbor_gens_t[
            i
        ].values.tolist()

    neighbor_gens_t_etrago.to_sql(
        "egon_etrago_generator_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # writing neighboring stores_t e_min_pu to etrago tables
    neighbor_stores_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "e_min_pu"],
        index=neighbor_stores_t.columns,
    )
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
    neighbor_stores_t_etrago["temp_id"] = 1
    for i in neighbor_stores_t.columns:
        neighbor_stores_t_etrago["e_min_pu"][i] = neighbor_stores_t[
            i
        ].values.tolist()

    neighbor_stores_t_etrago.to_sql(
        "egon_etrago_store_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # writing neighboring storage_units inflow to etrago tables
    neighbor_storage_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "inflow"],
        index=neighbor_storage_t.columns,
    )
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
    neighbor_storage_t_etrago["temp_id"] = 1
    for i in neighbor_storage_t.columns:
        neighbor_storage_t_etrago["inflow"][i] = neighbor_storage_t[
            i
        ].values.tolist()

    neighbor_storage_t_etrago.to_sql(
        "egon_etrago_storage_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring lines_t s_max_pu to etrago tables
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t_etrago = pd.DataFrame(
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
        )
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"

        for i in neighbor_lines_t.columns:
            neighbor_lines_t_etrago["s_max_pu"][i] = neighbor_lines_t[
                i
            ].values.tolist()

        neighbor_lines_t_etrago.to_sql(
            "egon_etrago_line_timeseries",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )


def overwrite_H2_pipeline_share():
    """Overwrite retrofitted_CH4pipeline-to-H2pipeline_share value

    Overwrite retrofitted_CH4pipeline-to-H2pipeline_share in the
    scenario parameter table if p-e-s is run.
    This function writes to the database and has no return value.

    """
    scn_name = "eGon100RE"
    # Select source and target from dataset configuration
    target = egon.data.config.datasets()["pypsa-eur-sec"]["target"]

    n = read_network()

    H2_pipelines = n.links[n.links["carrier"] == "H2 pipeline retrofitted"]
    CH4_pipelines = n.links[n.links["carrier"] == "gas pipeline"]
    H2_pipes_share = np.mean(
        [
            (i / j)
            for i, j in zip(
                H2_pipelines.p_nom_opt.to_list(), CH4_pipelines.p_nom.to_list()
            )
        ]
    )
    logger.info(
        "retrofitted_CH4pipeline-to-H2pipeline_share = " + str(H2_pipes_share)
    )

    parameters = db.select_dataframe(
        f"""
        SELECT *
        FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        WHERE name = '{scn_name}'
        """
    )

    gas_param = parameters.loc[0, "gas_parameters"]
    gas_param["retrofitted_CH4pipeline-to-H2pipeline_share"] = H2_pipes_share
    gas_param = json.dumps(gas_param)

    # Update data in db
    db.execute_sql(
        f"""
    UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
    SET gas_parameters = '{gas_param}'
    WHERE name = '{scn_name}';
    """
    )


def overwrite_max_gas_generation_overtheyear():
    """Overwrite max_gas_generation_overtheyear in scenario parameter table

    Overwrite max_gas_generation_overtheyear in the scenario parameter
    table if p-e-s is run.
    This function writes to the database and has no return value.

    """
    scn_name = "eGon100RE"

    # Select source and target from dataset configuration
    target = config.datasets()["gas_prod"]["target"]

    if execute_pypsa_eur_sec:
        n = read_network()
        max_value = n.stores[n.stores["carrier"] == "biogas"].loc[
            "DE0 0 biogas", "e_initial"
        ]

        parameters = db.select_dataframe(
            f"""
            SELECT *
            FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
            WHERE name = '{scn_name}'
            """
        )

        gas_param = parameters.loc[0, "gas_parameters"]
        gas_param["max_gas_generation_overtheyear"] = {"biogas": max_value}
        gas_param = json.dumps(gas_param)

        # Update data in db
        db.execute_sql(
            f"""
        UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        SET gas_parameters = '{gas_param}'
        WHERE name = '{scn_name}';
        """
        )


# Skip execution of pypsa-eur-sec by default until optional task is implemented
execute_pypsa_eur_sec = False

if execute_pypsa_eur_sec:
    tasks = (
        run_pypsa_eur_sec,
        clean_database,
        neighbor_reduction,
        overwrite_H2_pipeline_share,
        overwrite_max_gas_generation_overtheyear,
    )
else:
    tasks = (
        clean_database,
        neighbor_reduction,
    )


class PypsaEurSec(Dataset):
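    """Dataset wrapping the pypsa-eur-sec related tasks

    Runs clean_database and neighbor_reduction and, if
    execute_pypsa_eur_sec is enabled, additionally run_pypsa_eur_sec
    and the scenario parameter overwrites.
    """
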
    def __init__(self, dependencies):
        super().__init__(
            name="PypsaEurSec",
            version="0.0.11",
            dependencies=dependencies,
            tasks=tasks,
        )