Passed
Pull Request — dev (#848)
by
unknown
01:33
created

data.datasets.pypsaeursec.clean_database()   A

Complexity

Conditions 3

Size

Total Lines 79
Code Lines 15

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 15
dl 0
loc 79
rs 9.65
c 0
b 0
f 0
cc 3
nop 0

How to fix   Long Method   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include:

1
"""The central module containing all code dealing with importing data from
2
the pypsa-eur-sec scenario parameter creation
3
"""
4
5
from pathlib import Path
6
from urllib.request import urlretrieve
7
import os
8
import tarfile
9
10
from shapely.geometry import LineString
11
import geopandas as gpd
12
import importlib_resources as resources
13
import pandas as pd
14
import pypsa
15
import yaml
16
17
from egon.data import __path__, db
18
from egon.data.datasets import Dataset
19
from egon.data.datasets.scenario_parameters import get_sector_parameters
20
import egon.data.config
21
import egon.data.subprocess as subproc
22
23
24
def run_pypsa_eur_sec():
    """Set up and execute the pypsa-eur-sec workflow via snakemake.

    Creates a ``run-pypsa-eur-sec`` working directory, clones the
    pypsa-eur (v0.4.0), technology-data (v0.3.0) and pypsa-eur-sec
    repositories if missing, downloads the required data bundles from
    zenodo, writes the egon Snakefile and finally runs the snakemake
    target ``Main``.

    Returns
    -------
    None
    """
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur-sec"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"
    pypsa_eur_repos_data = pypsa_eur_repos / "data"
    technology_data_repos = filepath / "technology-data"
    pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
    pypsa_eur_sec_repos_data = pypsa_eur_sec_repos / "data"

    if not pypsa_eur_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.4.0",
                "https://github.com/PyPSA/pypsa-eur.git",
                pypsa_eur_repos,
            ]
        )

        # Replace the upstream Snakefile with the egon-specific one
        file_to_copy = os.path.join(
            __path__[0], "datasets", "pypsaeursec", "pypsaeur", "Snakefile"
        )

        subproc.run(["cp", file_to_copy, pypsa_eur_repos])

        # Read YAML file
        path_to_env = pypsa_eur_repos / "envs" / "environment.yaml"
        with open(path_to_env, "r") as stream:
            env = yaml.safe_load(stream)

        # gurobi is used as solver but is not part of the upstream env
        env["dependencies"].append("gurobi")

        # Write YAML file
        with open(path_to_env, "w", encoding="utf8") as outfile:
            yaml.dump(
                env, outfile, default_flow_style=False, allow_unicode=True
            )

        datafile = "pypsa-eur-data-bundle.tar.xz"
        datapath = pypsa_eur_repos / datafile
        if not datapath.exists():
            urlretrieve(
                f"https://zenodo.org/record/3517935/files/{datafile}", datapath
            )
            # Context manager so the archive handle is closed even if
            # extraction fails. NOTE(review): extractall trusts the
            # downloaded archive; the zenodo record is assumed safe.
            with tarfile.open(datapath) as tar:
                tar.extractall(pypsa_eur_repos_data)

    if not technology_data_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.3.0",
                "https://github.com/PyPSA/technology-data.git",
                technology_data_repos,
            ]
        )

    if not pypsa_eur_sec_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "https://github.com/openego/pypsa-eur-sec.git",
                pypsa_eur_sec_repos,
            ]
        )

    datafile = "pypsa-eur-sec-data-bundle.tar.gz"
    datapath = pypsa_eur_sec_repos_data / datafile
    if not datapath.exists():
        urlretrieve(
            f"https://zenodo.org/record/5824485/files/{datafile}", datapath
        )
        with tarfile.open(datapath) as tar:
            tar.extractall(pypsa_eur_sec_repos_data)

    with open(filepath / "Snakefile", "w") as snakefile:
        snakefile.write(
            resources.read_text("egon.data.datasets.pypsaeursec", "Snakefile")
        )

    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            filepath,
            "--snakefile",
            filepath / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "Main",
        ]
    )
130
131
132
def read_network():
    """Load the pypsa-eur-sec post-network.

    While executing pypsa-eur-sec is not yet implemented as an optional
    task, a pre-calculated network shipped with the data bundle is read
    instead of the results of a local workflow run.

    Returns
    -------
    pypsa.Network
        The imported post-network.
    """
    # Set execute_pypsa_eur_sec to False until optional task is implemented
    execute_pypsa_eur_sec = False
    cwd = Path(".")

    if execute_pypsa_eur_sec:
        pypsa_eur_sec_repos = cwd / "run-pypsa-eur-sec" / "pypsa-eur-sec"
        # Read the scenario definition of the pypsa-eur-sec run
        pes_egonconfig = pypsa_eur_sec_repos / "config_egon.yaml"
        with open(pes_egonconfig, "r") as stream:
            data_config = yaml.safe_load(stream)

        scenario = data_config["scenario"]
        simpl = scenario["simpl"][0]
        clusters = scenario["clusters"][0]
        lv = scenario["lv"][0]
        opts = scenario["opts"][0]
        sector_opts = scenario["sector_opts"][0]
        planning_horizons = scenario["planning_horizons"][0]

        file = (
            f"elec_s{simpl}_{clusters}_lv{lv}_{opts}"
            f"_{sector_opts}_{planning_horizons}.nc"
        )

        target_file = (
            pypsa_eur_sec_repos
            / "results"
            / data_config["run"]
            / "postnetworks"
            / file
        )
    else:
        target_file = (
            cwd
            / "data_bundle_egon_data"
            / "pypsa_eur_sec"
            / "2022-05-04-egondata-integration"
            / "postnetworks"
            / "elec_s_37_lv2.0__Co2L0-3H-T-H-B-I-dist1_2050.nc"
        )

    return pypsa.Network(str(target_file))
180
181
182
def clean_database():
    """Remove all components abroad for eGon100RE of the database

    Remove all components abroad and their associated time series of
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM grid.egon_etrago_{comp}_timeseries
            WHERE {comp}_id IN (
                SELECT {comp}_id FROM grid.egon_etrago_{comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM grid.egon_etrago_{comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    comp_2_ports = [
        "line",
        "transformer",
        "link",
    ]

    # id_col instead of `id` to avoid shadowing the builtin
    for comp, id_col in zip(comp_2_ports, ["line_id", "trafo_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM grid.egon_etrago_{comp}_timeseries
            WHERE scn_name = '{scn_name}'
            AND {id_col} IN (
                SELECT {id_col} FROM grid.egon_etrago_{comp}
            WHERE "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            OR "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            );

            DELETE FROM grid.egon_etrago_{comp}
            WHERE scn_name = '{scn_name}'
            AND ("bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            OR "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}'))
            ;"""
        )
        # BUG FIX: the OR branch above is now parenthesized; previously
        # `scn_name = ... AND bus0 IN (...) OR bus1 IN (...)` bound AND
        # tighter than OR, so bus1-matches were deleted in ALL scenarios.

    # BUG FIX: this statement was a plain (non-f) string, so the
    # '{scn_name}' placeholder was never interpolated and no foreign
    # buses were ever deleted.
    db.execute_sql(
        f"DELETE FROM grid.egon_etrago_bus "
        f"WHERE scn_name = '{scn_name}' "
        "AND country <> 'DE'"
    )
264
265
266
def neighbor_reduction():
    """Reduce the foreign part of the network and export it to etrago tables.

    Reads the pypsa-eur-sec network, drops all buses outside the wanted
    countries, replaces cut cross-border lines/links by equivalent loads
    carrying the line's hourly loading, renumbers the remaining
    neighbouring components with fresh etrago ids and writes buses,
    lines, links, generators, loads, stores, storage units and their
    time series to the ``grid.egon_etrago_*`` tables for scenario
    'eGon100RE' (lines and DC links additionally for 'eGon2035').

    Returns
    -------
    None
    """
    network = read_network()

    # Country prefixes of the buses that are kept
    wanted_countries = [
        "DE",
        "AT",
        "CH",
        "CZ",
        "PL",
        "SE",
        "NO",
        "DK",
        "GB",
        "NL",
        "BE",
        "FR",
        "LU",
    ]
    # Buses whose index does not contain any wanted country code
    foreign_buses = network.buses[
        ~network.buses.index.str.contains("|".join(wanted_countries))
    ]
    network.buses = network.buses.drop(
        network.buses.loc[foreign_buses.index].index
    )

    # drop foreign lines and links from the 2nd row

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            & (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all lines which have at bus1 the bus which is kept
    lines_cb_1 = network.lines[
        (network.lines["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the line's hourly loading
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_1.carrier[k]

    # select all lines which have at bus0 the bus which is kept
    lines_cb_0 = network.lines[
        (network.lines["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the line's hourly loading
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_0.carrier[k]

    # do the same for links

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            & (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all links which have at bus1 the bus which is kept
    links_cb_1 = network.links[
        (network.links["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the link's hourly loading
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_1.carrier[k]

    # select all links which have at bus0 the bus which is kept
    links_cb_0 = network.links[
        (network.links["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the link's hourly loading
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_0.carrier[k]

    # drop remaining foreign components

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            | (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            | (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.transformers = network.transformers.drop(
        network.transformers[
            (network.transformers["bus0"].isin(network.buses.index) == False)
            | (network.transformers["bus1"].isin(network.buses.index) == False)
        ].index
    )
    network.generators = network.generators.drop(
        network.generators[
            (network.generators["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.loads = network.loads.drop(
        network.loads[
            (network.loads["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.storage_units = network.storage_units.drop(
        network.storage_units[
            (network.storage_units["bus"].isin(network.buses.index) == False)
        ].index
    )

    # Drop time-series columns of components that were removed above
    components = [
        "loads",
        "generators",
        "lines",
        "buses",
        "transformers",
        "links",
    ]
    for g in components:  # loads_t
        h = g + "_t"
        nw = getattr(network, h)  # network.loads_t
        for i in nw.keys():  # network.loads_t.p
            cols = [
                j
                for j in getattr(nw, i).columns
                if j not in getattr(network, g).index
            ]
            for k in cols:
                del getattr(nw, i)[k]

    # writing components of neighboring countries to etrago tables

    # Set country tag for all buses
    network.buses.country = network.buses.index.str[:2]
    neighbors = network.buses[network.buses.country != "DE"]

    neighbors["new_index"] = (
        db.next_etrago_id("bus") + neighbors.reset_index().index
    )

    # lines, the foreign crossborder lines
    # (without crossborder lines to Germany!)

    neighbor_lines = network.lines[
        network.lines.bus0.isin(neighbors.index)
        & network.lines.bus1.isin(neighbors.index)
    ]
    # NOTE(review): neighbor_lines_t is only defined when s_max_pu is
    # non-empty; every later use is guarded by the same condition.
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t = network.lines_t["s_max_pu"][neighbor_lines.index]

    neighbor_lines.reset_index(inplace=True)
    neighbor_lines.bus0 = (
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
    )
    neighbor_lines.bus1 = (
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
    )
    neighbor_lines.index += db.next_etrago_id("line")

    if not network.lines_t["s_max_pu"].empty:
        for i in neighbor_lines_t.columns:
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)

    # links
    neighbor_links = network.links[
        network.links.bus0.isin(neighbors.index)
        & network.links.bus1.isin(neighbors.index)
    ]

    neighbor_links.reset_index(inplace=True)
    neighbor_links.bus0 = (
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
    )
    neighbor_links.bus1 = (
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
    )
    neighbor_links.index += db.next_etrago_id("link")

    # generators
    neighbor_gens = network.generators[
        network.generators.bus.isin(neighbors.index)
    ]
    neighbor_gens_t = network.generators_t["p_max_pu"][
        neighbor_gens[
            neighbor_gens.index.isin(network.generators_t["p_max_pu"].columns)
        ].index
    ]

    neighbor_gens.reset_index(inplace=True)
    neighbor_gens.bus = (
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
    )
    neighbor_gens.index += db.next_etrago_id("generator")

    # Rename time-series columns from old names to new etrago ids
    for i in neighbor_gens_t.columns:
        new_index = neighbor_gens[neighbor_gens["name"] == i].index
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)

    # loads

    neighbor_loads = network.loads[network.loads.bus.isin(neighbors.index)]
    neighbor_loads_t_index = neighbor_loads.index[
        neighbor_loads.index.isin(network.loads_t.p_set.columns)
    ]
    neighbor_loads_t = network.loads_t["p_set"][neighbor_loads_t_index]

    neighbor_loads.reset_index(inplace=True)
    neighbor_loads.bus = (
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
    )
    neighbor_loads.index += db.next_etrago_id("load")

    for i in neighbor_loads_t.columns:
        new_index = neighbor_loads[neighbor_loads["index"] == i].index
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)

    # stores
    neighbor_stores = network.stores[network.stores.bus.isin(neighbors.index)]
    neighbor_stores_t_index = neighbor_stores.index[
        neighbor_stores.index.isin(network.stores_t.e_min_pu.columns)
    ]
    neighbor_stores_t = network.stores_t["e_min_pu"][neighbor_stores_t_index]

    neighbor_stores.reset_index(inplace=True)
    neighbor_stores.bus = (
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
    )
    neighbor_stores.index += db.next_etrago_id("store")

    for i in neighbor_stores_t.columns:
        new_index = neighbor_stores[neighbor_stores["name"] == i].index
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)

    # storage_units
    neighbor_storage = network.storage_units[
        network.storage_units.bus.isin(neighbors.index)
    ]
    neighbor_storage_t_index = neighbor_storage.index[
        neighbor_storage.index.isin(network.storage_units_t.inflow.columns)
    ]
    neighbor_storage_t = network.storage_units_t["inflow"][
        neighbor_storage_t_index
    ]

    neighbor_storage.reset_index(inplace=True)
    neighbor_storage.bus = (
        neighbors.loc[neighbor_storage.bus, "new_index"]
        .reset_index()
        .new_index
    )
    neighbor_storage.index += db.next_etrago_id("storage")

    for i in neighbor_storage_t.columns:
        new_index = neighbor_storage[neighbor_storage["name"] == i].index
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)

    # Connect to local database
    engine = db.engine()

    neighbors["scn_name"] = "eGon100RE"
    neighbors.index = neighbors["new_index"]

    # Correct geometry for non AC buses
    carriers = set(neighbors.carrier.to_list())
    carriers.remove("AC")
    non_AC_neighbors = pd.DataFrame()
    for c in carriers:
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
            "location", drop=False
        )
        for i in ["x", "y"]:
            c_neighbors = c_neighbors.drop(i, axis=1)
        # Take the coordinates of the AC bus at the same location
        coordinates = neighbors[neighbors.carrier == "AC"][
            ["location", "x", "y"]
        ].set_index("location")
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
            "new_index", drop=False
        )
        non_AC_neighbors = non_AC_neighbors.append(c_neighbors)
    neighbors = neighbors[neighbors.carrier == "AC"].append(non_AC_neighbors)

    # Drop columns that have no counterpart in the etrago bus table
    for i in ["new_index", "control", "generator", "location", "sub_network"]:
        neighbors = neighbors.drop(i, axis=1)

    # Add geometry column
    neighbors = (
        gpd.GeoDataFrame(
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
        )
        .rename_geometry("geom")
        .set_crs(4326)
    )

    # Unify carrier names
    neighbors.carrier.replace(
        {
            "gas": "CH4",
            "gas_for_industry": "CH4_for_industry",
        },
        inplace=True,
    )

    neighbors.to_postgis(
        "egon_etrago_bus",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="bus_id",
    )

    # prepare and write neighboring crossborder lines to etrago tables
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
        # Prepare and write the given lines for scenario ``scn``.
        neighbor_lines["scn_name"] = scn
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
            int
        )
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]

        # Drop columns that have no counterpart in the etrago line table
        for i in [
            "name",
            "x_pu_eff",
            "r_pu_eff",
            "sub_network",
            "x_pu",
            "r_pu",
            "g_pu",
            "b_pu",
            "s_nom_opt",
        ]:
            neighbor_lines = neighbor_lines.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_lines = (
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
            "lifetime"
        ]["ac_ehv_overhead_line"]

        neighbor_lines.to_postgis(
            "egon_etrago_line",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )

    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE")
    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon2035")

    # prepare and write neighboring crossborder links to etrago tables
    def links_to_etrago(neighbor_links, scn="eGon100RE"):
        # Prepare and write the given links for scenario ``scn``.
        neighbor_links["scn_name"] = scn

        # Drop columns that have no counterpart in the etrago link table
        for i in [
            "name",
            "geometry",
            "tags",
            "under_construction",
            "underground",
            "underwater_fraction",
            "bus2",
            "bus3",
            "bus4",
            "efficiency2",
            "efficiency3",
            "efficiency4",
            "lifetime",
            "p_nom_opt",
        ]:
            neighbor_links = neighbor_links.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_links.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_links.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_links.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_links = (
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        # Unify carrier names
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")

        neighbor_links.carrier.replace(
            {
                "H2_Electrolysis": "power_to_H2",
                "H2_Fuel_Cell": "H2_to_power",
                "H2_pipeline_retrofitted": "H2_retrofit",
                "SMR": "CH4_to_H2",
                "SMR_CC": "CH4_to_H2_CC",
                "Sabatier": "H2_to_CH4",
                "gas_for_industry": "CH4_for_industry",
                "gas_for_industry_CC": "CH4_for_industry_CC",
                "gas_pipeline": "CH4",
            },
            inplace=True,
        )

        neighbor_links.to_postgis(
            "egon_etrago_link",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="link_id",
        )

    links_to_etrago(neighbor_links, "eGon100RE")
    links_to_etrago(neighbor_links[neighbor_links.carrier == "DC"], "eGon2035")

    # prepare neighboring generators for etrago tables
    neighbor_gens["scn_name"] = "eGon100RE"
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
    neighbor_gens["p_nom_extendable"] = False

    # Unify carrier names
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")

    neighbor_gens.carrier.replace(
        {
            "onwind": "wind_onshore",
            "ror": "run_of_river",
            "offwind-ac": "wind_offshore",
            "offwind-dc": "wind_offshore",
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
        },
        inplace=True,
    )

    for i in ["name", "weight", "lifetime", "p_set", "q_set", "p_nom_opt"]:
        neighbor_gens = neighbor_gens.drop(i, axis=1)

    neighbor_gens.to_sql(
        "egon_etrago_generator",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # prepare neighboring loads for etrago tables
    neighbor_loads["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")

    neighbor_loads.carrier.replace(
        {
            "electricity": "AC",
            "DC": "AC",
            "industry_electricity": "AC",
            "H2_pipeline": "H2_system_boundary",
            "gas_for_industry": "CH4_for_industry",
        },
        inplace=True,
    )

    for i in ["index", "p_set", "q_set"]:
        neighbor_loads = neighbor_loads.drop(i, axis=1)

    neighbor_loads.to_sql(
        "egon_etrago_load",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # prepare neighboring stores for etrago tables
    neighbor_stores["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")

    neighbor_stores.carrier.replace(
        {
            "Li_ion": "battery",
            "gas": "CH4",
        },
        inplace=True,
    )
    # Split H2 stores by capacity: <= 1e9 underground, > 1e9 overground
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max <= 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_underground"
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max > 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_overground"

    for i in ["name", "p_set", "q_set", "e_nom_opt", "lifetime"]:
        neighbor_stores = neighbor_stores.drop(i, axis=1)

    neighbor_stores.to_sql(
        "egon_etrago_store",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # prepare neighboring storage_units for etrago tables
    neighbor_storage["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")

    neighbor_storage.carrier.replace(
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
    )

    for i in ["name", "p_nom_opt"]:
        neighbor_storage = neighbor_storage.drop(i, axis=1)

    neighbor_storage.to_sql(
        "egon_etrago_storage",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring loads_t p_sets to etrago tables

    neighbor_loads_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_set"],
        index=neighbor_loads_t.columns,
    )
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
    neighbor_loads_t_etrago["temp_id"] = 1
    for i in neighbor_loads_t.columns:
        neighbor_loads_t_etrago["p_set"][i] = neighbor_loads_t[
            i
        ].values.tolist()

    neighbor_loads_t_etrago.to_sql(
        "egon_etrago_load_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # writing neighboring generator_t p_max_pu to etrago tables
    neighbor_gens_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_max_pu"],
        index=neighbor_gens_t.columns,
    )
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
    neighbor_gens_t_etrago["temp_id"] = 1
    for i in neighbor_gens_t.columns:
        neighbor_gens_t_etrago["p_max_pu"][i] = neighbor_gens_t[
            i
        ].values.tolist()

    neighbor_gens_t_etrago.to_sql(
        "egon_etrago_generator_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # writing neighboring stores_t e_min_pu to etrago tables
    neighbor_stores_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "e_min_pu"],
        index=neighbor_stores_t.columns,
    )
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
    neighbor_stores_t_etrago["temp_id"] = 1
    for i in neighbor_stores_t.columns:
        neighbor_stores_t_etrago["e_min_pu"][i] = neighbor_stores_t[
            i
        ].values.tolist()

    neighbor_stores_t_etrago.to_sql(
        "egon_etrago_store_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # writing neighboring storage_units inflow to etrago tables
    neighbor_storage_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "inflow"],
        index=neighbor_storage_t.columns,
    )
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
    neighbor_storage_t_etrago["temp_id"] = 1
    for i in neighbor_storage_t.columns:
        neighbor_storage_t_etrago["inflow"][i] = neighbor_storage_t[
            i
        ].values.tolist()

    neighbor_storage_t_etrago.to_sql(
        "egon_etrago_storage_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring lines_t s_max_pu to etrago tables
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t_etrago = pd.DataFrame(
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
        )
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"

        for i in neighbor_lines_t.columns:
            neighbor_lines_t_etrago["s_max_pu"][i] = neighbor_lines_t[
                i
            ].values.tolist()

        neighbor_lines_t_etrago.to_sql(
            "egon_etrago_line_timeseries",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )
966
967
968
# Skip execution of pypsa-eur-sec by default until optional task is implemented
execute_pypsa_eur_sec = False

# Task graph: the workflow run is only prepended when explicitly enabled.
tasks = (clean_database, neighbor_reduction)
if execute_pypsa_eur_sec:
    tasks = (run_pypsa_eur_sec,) + tasks
975
976
977
class PypsaEurSec(Dataset):
    """Dataset wrapping the pypsa-eur-sec import pipeline.

    Registers the module-level ``tasks`` tuple (optionally including the
    pypsa-eur-sec workflow run) under the dataset name ``PypsaEurSec``.
    """

    def __init__(self, dependencies):
        super().__init__(
            name="PypsaEurSec",
            version="0.0.7",
            dependencies=dependencies,
            tasks=tasks,
        )
985