Passed
Pull Request — dev (#880)
by
unknown
01:32
created

data.datasets.pypsaeursec.clean_database()   A

Complexity

Conditions 3

Size

Total Lines 79
Code Lines 15

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 15
dl 0
loc 79
rs 9.65
c 0
b 0
f 0
cc 3
nop 0

How to fix   Long Method   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include:

1
"""The central module containing all code dealing with importing data from
2
the pysa-eur-sec scenario parameter creation
3
"""
4
5
from pathlib import Path
6
from urllib.request import urlretrieve
7
import os
8
import tarfile
9
10
from shapely.geometry import LineString
11
import geopandas as gpd
12
import importlib_resources as resources
13
import pandas as pd
14
import pypsa
15
import yaml
16
17
from egon.data import __path__, db
18
from egon.data.datasets import Dataset
19
from egon.data.datasets.scenario_parameters import get_sector_parameters
20
import egon.data.config
21
import egon.data.subprocess as subproc
22
23
24
def run_pypsa_eur_sec():
    """Set up and execute the pypsa-eur-sec workflow via snakemake.

    Clones the pypsa-eur (v0.4.0), technology-data (v0.3.0) and
    pypsa-eur-sec repositories into ``./run-pypsa-eur-sec`` if they are
    not already present, downloads the required Zenodo data bundles,
    patches the pypsa-eur conda environment, writes the bundled
    Snakefile and finally runs its ``Main`` rule with conda-managed
    environments.

    Returns
    -------
    None
    """
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur-sec"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"
    pypsa_eur_repos_data = pypsa_eur_repos / "data"
    technology_data_repos = filepath / "technology-data"
    pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
    pypsa_eur_sec_repos_data = pypsa_eur_sec_repos / "data"

    if not pypsa_eur_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.4.0",
                "https://github.com/PyPSA/pypsa-eur.git",
                pypsa_eur_repos,
            ]
        )

        # Overwrite the repository's Snakefile with the project-specific one
        file_to_copy = os.path.join(
            __path__[0], "datasets", "pypsaeursec", "pypsaeur", "Snakefile"
        )

        subproc.run(["cp", file_to_copy, pypsa_eur_repos])

        # Read the workflow's conda environment definition
        path_to_env = pypsa_eur_repos / "envs" / "environment.yaml"
        with open(path_to_env, "r") as stream:
            env = yaml.safe_load(stream)

        # The optimization run requires the gurobi solver in the
        # workflow's conda environment.
        env["dependencies"].append("gurobi")

        # Write the patched environment definition back
        with open(path_to_env, "w", encoding="utf8") as outfile:
            yaml.dump(
                env, outfile, default_flow_style=False, allow_unicode=True
            )

        datafile = "pypsa-eur-data-bundle.tar.xz"
        datapath = pypsa_eur_repos / datafile
        if not datapath.exists():
            urlretrieve(
                f"https://zenodo.org/record/3517935/files/{datafile}", datapath
            )
            # Context manager closes the archive even if extraction fails
            # (the previous version leaked the open tarfile handle).
            with tarfile.open(datapath) as tar:
                tar.extractall(pypsa_eur_repos_data)

    if not technology_data_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.3.0",
                "https://github.com/PyPSA/technology-data.git",
                technology_data_repos,
            ]
        )

    if not pypsa_eur_sec_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "https://github.com/openego/pypsa-eur-sec.git",
                pypsa_eur_sec_repos,
            ]
        )

    datafile = "pypsa-eur-sec-data-bundle.tar.gz"
    datapath = pypsa_eur_sec_repos_data / datafile
    if not datapath.exists():
        urlretrieve(
            f"https://zenodo.org/record/5824485/files/{datafile}", datapath
        )
        # Same leak fix as above: close the archive deterministically.
        with tarfile.open(datapath) as tar:
            tar.extractall(pypsa_eur_sec_repos_data)

    # The top-level Snakefile shipped with this package drives the run
    with open(filepath / "Snakefile", "w") as snakefile:
        snakefile.write(
            resources.read_text("egon.data.datasets.pypsaeursec", "Snakefile")
        )

    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            filepath,
            "--snakefile",
            filepath / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "Main",
        ]
    )
130
131
132
def read_network():
    """Load the pypsa-eur-sec post-network for the eGon100RE scenario.

    While executing pypsa-eur-sec is disabled (until the optional task
    is implemented), a pre-calculated network from the data bundle is
    read instead of a freshly computed workflow result.

    Returns
    -------
    pypsa.Network
        The post-network read from disk.
    """
    # Stays False until the optional pypsa-eur-sec task is implemented.
    execute_pypsa_eur_sec = False
    cwd = Path(".")

    if execute_pypsa_eur_sec:
        pes_repo = cwd / "run-pypsa-eur-sec" / "pypsa-eur-sec"

        # Scenario settings come from the workflow's egon config file.
        pes_egonconfig = pes_repo / "config_egon.yaml"
        with open(pes_egonconfig, "r") as stream:
            data_config = yaml.safe_load(stream)

        scenario = data_config["scenario"]
        file = "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc".format(
            simpl=scenario["simpl"][0],
            clusters=scenario["clusters"][0],
            opts=scenario["opts"][0],
            lv=scenario["lv"][0],
            sector_opts=scenario["sector_opts"][0],
            planning_horizons=scenario["planning_horizons"][0],
        )

        target_file = (
            pes_repo / "results" / data_config["run"] / "postnetworks" / file
        )
    else:
        # Pre-calculated network shipped in the egon data bundle.
        target_file = (
            cwd
            / "data_bundle_egon_data"
            / "pypsa_eur_sec"
            / "2022-05-04-egondata-integration"
            / "postnetworks"
            / "elec_s_37_lv2.0__Co2L0-3H-T-H-B-I-dist1_2050.nc"
        )

    return pypsa.Network(str(target_file))
180
181
182
def clean_database():
    """Remove all components abroad for eGon100RE of the database

    Remove all components abroad and their associated time series of
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    # One-port components are attached to a single bus column named "bus".
    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM grid.egon_etrago_{comp}_timeseries
            WHERE {comp}_id IN (
                SELECT {comp}_id FROM grid.egon_etrago_{comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM grid.egon_etrago_{comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    # Branch components connect two buses ("bus0"/"bus1"); their id
    # columns do not follow a uniform naming scheme, hence the zip.
    comp_2_ports = [
        "line",
        "transformer",
        "link",
    ]

    # "id" renamed to "id_col" so the builtin id() is not shadowed.
    for comp, id_col in zip(comp_2_ports, ["line_id", "trafo_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM grid.egon_etrago_{comp}_timeseries
            WHERE scn_name = '{scn_name}'
            AND {id_col} IN (
                SELECT {id_col} FROM grid.egon_etrago_{comp}
            WHERE "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            );

            DELETE FROM grid.egon_etrago_{comp}
            WHERE scn_name = '{scn_name}'
            AND "bus0" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
            SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            ;"""
        )

    # BUG FIX: this statement was a plain (non-f) string, so the literal
    # text "{scn_name}" was sent to the database and the foreign buses of
    # eGon100RE were never deleted. The f-prefix interpolates the scenario.
    db.execute_sql(
        f"DELETE FROM grid.egon_etrago_bus "
        f"WHERE scn_name = '{scn_name}' "
        "AND country <> 'DE'"
    )
264
265
266
def neighbor_reduction():
    """Reduce the pypsa-eur-sec network and export neighbours to the DB.

    Reads the post-network via :func:`read_network`, drops every bus whose
    index does not match one of the wanted country prefixes, replaces cut
    cross-border lines/links with fixed "slack" loads carrying the branch's
    hourly loading, prunes all remaining foreign components and time
    series, and finally writes the neighbouring countries' buses, lines,
    links, generators, loads, stores and storage units (plus their time
    series) into the ``grid.egon_etrago_*`` tables.

    Returns
    -------
    None
    """

    network = read_network()

    # Germany plus its electrical neighbours; matched as substrings of the
    # bus index (pypsa-eur bus names start with the two-letter country code).
    wanted_countries = [
        "DE",
        "AT",
        "CH",
        "CZ",
        "PL",
        "SE",
        "NO",
        "DK",
        "GB",
        "NL",
        "BE",
        "FR",
        "LU",
    ]
    foreign_buses = network.buses[
        ~network.buses.index.str.contains("|".join(wanted_countries))
    ]
    network.buses = network.buses.drop(
        network.buses.loc[foreign_buses.index].index
    )

    # drop foreign lines and links from the 2nd row

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            & (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all lines which have at bus1 the bus which is kept
    lines_cb_1 = network.lines[
        (network.lines["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the line's hourly loading
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_1.carrier[k]

    # select all lines which have at bus0 the bus which is kept
    lines_cb_0 = network.lines[
        (network.lines["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the line's hourly loading
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_0.carrier[k]

    # do the same for links

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            & (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all links which have at bus1 the bus which is kept
    links_cb_1 = network.links[
        (network.links["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the link's hourly loading
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_1.carrier[k]

    # select all links which have at bus0 the bus which is kept
    links_cb_0 = network.links[
        (network.links["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the link's hourly loading
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_0.carrier[k]

    # drop remaining foreign components

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            | (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            | (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.transformers = network.transformers.drop(
        network.transformers[
            (network.transformers["bus0"].isin(network.buses.index) == False)
            | (network.transformers["bus1"].isin(network.buses.index) == False)
        ].index
    )
    network.generators = network.generators.drop(
        network.generators[
            (network.generators["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.loads = network.loads.drop(
        network.loads[
            (network.loads["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.storage_units = network.storage_units.drop(
        network.storage_units[
            (network.storage_units["bus"].isin(network.buses.index) == False)
        ].index
    )

    # Purge the time-series columns of components that were just dropped,
    # so e.g. network.loads_t.p only keeps columns present in network.loads.
    components = [
        "loads",
        "generators",
        "lines",
        "buses",
        "transformers",
        "links",
    ]
    for g in components:  # loads_t
        h = g + "_t"
        nw = getattr(network, h)  # network.loads_t
        for i in nw.keys():  # network.loads_t.p
            cols = [
                j
                for j in getattr(nw, i).columns
                if j not in getattr(network, g).index
            ]
            for k in cols:
                del getattr(nw, i)[k]

    # writing components of neighboring countries to etrago tables

    # Set country tag for all buses
    network.buses.country = network.buses.index.str[:2]
    neighbors = network.buses[network.buses.country != "DE"]

    # Map pypsa-eur bus names onto fresh sequential etrago bus ids.
    neighbors["new_index"] = (
        db.next_etrago_id("bus") + neighbors.reset_index().index
    )

    # lines, the foreign crossborder lines
    # (without crossborder lines to Germany!)

    neighbor_lines = network.lines[
        network.lines.bus0.isin(neighbors.index)
        & network.lines.bus1.isin(neighbors.index)
    ]
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t = network.lines_t["s_max_pu"][neighbor_lines.index]

    neighbor_lines.reset_index(inplace=True)
    neighbor_lines.bus0 = (
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
    )
    neighbor_lines.bus1 = (
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
    )
    neighbor_lines.index += db.next_etrago_id("line")

    # NOTE(review): neighbor_lines_t is only defined when s_max_pu is
    # non-empty; this use is guarded by the identical check, so the static
    # "possibly undefined" warning is a false positive.
    if not network.lines_t["s_max_pu"].empty:
        for i in neighbor_lines_t.columns:
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)

    # links
    neighbor_links = network.links[
        network.links.bus0.isin(neighbors.index)
        & network.links.bus1.isin(neighbors.index)
    ]

    neighbor_links.reset_index(inplace=True)
    neighbor_links.bus0 = (
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
    )
    neighbor_links.bus1 = (
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
    )
    neighbor_links.index += db.next_etrago_id("link")

    # generators
    neighbor_gens = network.generators[
        network.generators.bus.isin(neighbors.index)
    ]
    neighbor_gens_t = network.generators_t["p_max_pu"][
        neighbor_gens[
            neighbor_gens.index.isin(network.generators_t["p_max_pu"].columns)
        ].index
    ]

    neighbor_gens.reset_index(inplace=True)
    neighbor_gens.bus = (
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
    )
    neighbor_gens.index += db.next_etrago_id("generator")

    # Rename the time-series columns from pypsa-eur names to new etrago ids.
    for i in neighbor_gens_t.columns:
        new_index = neighbor_gens[neighbor_gens["name"] == i].index
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)

    # loads

    neighbor_loads = network.loads[network.loads.bus.isin(neighbors.index)]
    neighbor_loads_t_index = neighbor_loads.index[
        neighbor_loads.index.isin(network.loads_t.p_set.columns)
    ]
    neighbor_loads_t = network.loads_t["p_set"][neighbor_loads_t_index]

    neighbor_loads.reset_index(inplace=True)
    neighbor_loads.bus = (
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
    )
    neighbor_loads.index += db.next_etrago_id("load")

    for i in neighbor_loads_t.columns:
        new_index = neighbor_loads[neighbor_loads["index"] == i].index
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)

    # stores
    neighbor_stores = network.stores[network.stores.bus.isin(neighbors.index)]
    neighbor_stores_t_index = neighbor_stores.index[
        neighbor_stores.index.isin(network.stores_t.e_min_pu.columns)
    ]
    neighbor_stores_t = network.stores_t["e_min_pu"][neighbor_stores_t_index]

    neighbor_stores.reset_index(inplace=True)
    neighbor_stores.bus = (
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
    )
    neighbor_stores.index += db.next_etrago_id("store")

    for i in neighbor_stores_t.columns:
        new_index = neighbor_stores[neighbor_stores["name"] == i].index
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)

    # storage_units
    neighbor_storage = network.storage_units[
        network.storage_units.bus.isin(neighbors.index)
    ]
    neighbor_storage_t_index = neighbor_storage.index[
        neighbor_storage.index.isin(network.storage_units_t.inflow.columns)
    ]
    neighbor_storage_t = network.storage_units_t["inflow"][
        neighbor_storage_t_index
    ]

    neighbor_storage.reset_index(inplace=True)
    neighbor_storage.bus = (
        neighbors.loc[neighbor_storage.bus, "new_index"]
        .reset_index()
        .new_index
    )
    neighbor_storage.index += db.next_etrago_id("storage")

    for i in neighbor_storage_t.columns:
        new_index = neighbor_storage[neighbor_storage["name"] == i].index
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)

    # Connect to local database
    engine = db.engine()

    neighbors["scn_name"] = "eGon100RE"
    neighbors.index = neighbors["new_index"]

    # Correct geometry for non AC buses: non-AC buses inherit the x/y
    # coordinates of the AC bus at the same location.
    carriers = set(neighbors.carrier.to_list())
    carriers.remove("AC")
    non_AC_neighbors = pd.DataFrame()
    for c in carriers:
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
            "location", drop=False
        )
        for i in ["x", "y"]:
            c_neighbors = c_neighbors.drop(i, axis=1)
        coordinates = neighbors[neighbors.carrier == "AC"][
            ["location", "x", "y"]
        ].set_index("location")
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
            "new_index", drop=False
        )
        # NOTE(review): DataFrame.append is the pre-pandas-2.0 API —
        # presumably this module is pinned to an older pandas; confirm.
        non_AC_neighbors = non_AC_neighbors.append(c_neighbors)
    neighbors = neighbors[neighbors.carrier == "AC"].append(non_AC_neighbors)

    # Drop columns that have no counterpart in the etrago bus table.
    for i in ["new_index", "control", "generator", "location", "sub_network"]:
        neighbors = neighbors.drop(i, axis=1)

    # Add geometry column
    neighbors = (
        gpd.GeoDataFrame(
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
        )
        .rename_geometry("geom")
        .set_crs(4326)
    )

    # Unify carrier names
    neighbors.carrier = neighbors.carrier.str.replace(" ", "_")
    neighbors.carrier.replace(
        {
            "gas": "CH4",
            "gas_for_industry": "CH4_for_industry",
        },
        inplace=True,
    )

    neighbors.to_postgis(
        "egon_etrago_bus",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="bus_id",
    )

    # prepare and write neighboring crossborder lines to etrago tables
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
        # Write the prepared cross-border lines for scenario `scn` into
        # grid.egon_etrago_line (closes over `neighbors` and `engine`).
        neighbor_lines["scn_name"] = scn
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
            int
        )
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]

        for i in [
            "name",
            "x_pu_eff",
            "r_pu_eff",
            "sub_network",
            "x_pu",
            "r_pu",
            "g_pu",
            "b_pu",
            "s_nom_opt",
        ]:
            neighbor_lines = neighbor_lines.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_lines = (
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
            "lifetime"
        ]["ac_ehv_overhead_line"]

        neighbor_lines.to_postgis(
            "egon_etrago_line",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )

    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE")
    lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon2035")

    # prepare and write neighboring crossborder links to etrago tables
    def links_to_etrago(neighbor_links, scn="eGon100RE"):
        # Write the prepared cross-border links for scenario `scn` into
        # grid.egon_etrago_link (closes over `neighbors` and `engine`).
        neighbor_links["scn_name"] = scn

        for i in [
            "name",
            "geometry",
            "tags",
            "under_construction",
            "underground",
            "underwater_fraction",
            "bus2",
            "bus3",
            "bus4",
            "efficiency2",
            "efficiency3",
            "efficiency4",
            "lifetime",
            "p_nom_opt",
        ]:
            neighbor_links = neighbor_links.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_links.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_links.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_links.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_links = (
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        # Unify carrier names
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")

        neighbor_links.carrier.replace(
            {
                "H2_Electrolysis": "power_to_H2",
                "H2_Fuel_Cell": "H2_to_power",
                "H2_pipeline_retrofitted": "H2_retrofit",
                "SMR": "CH4_to_H2",
                "SMR_CC": "CH4_to_H2_CC",
                "Sabatier": "H2_to_CH4",
                "gas_for_industry": "CH4_for_industry",
                "gas_for_industry_CC": "CH4_for_industry_CC",
                "gas_pipeline": "CH4",
            },
            inplace=True,
        )

        neighbor_links.to_postgis(
            "egon_etrago_link",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="link_id",
        )

    links_to_etrago(neighbor_links, "eGon100RE")
    # eGon2035 only receives the DC links.
    links_to_etrago(neighbor_links[neighbor_links.carrier == "DC"], "eGon2035")

    # prepare neighboring generators for etrago tables
    neighbor_gens["scn_name"] = "eGon100RE"
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
    neighbor_gens["p_nom_extendable"] = False

    # Unify carrier names
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")

    neighbor_gens.carrier.replace(
        {
            "onwind": "wind_onshore",
            "ror": "run_of_river",
            "offwind-ac": "wind_offshore",
            "offwind-dc": "wind_offshore",
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
        },
        inplace=True,
    )

    for i in ["name", "weight", "lifetime", "p_set", "q_set", "p_nom_opt"]:
        neighbor_gens = neighbor_gens.drop(i, axis=1)

    neighbor_gens.to_sql(
        "egon_etrago_generator",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # prepare neighboring loads for etrago tables
    neighbor_loads["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")

    neighbor_loads.carrier.replace(
        {
            "electricity": "AC",
            "DC": "AC",
            "industry_electricity": "AC",
            "H2_pipeline": "H2_system_boundary",
            "gas_for_industry": "CH4_for_industry",
        },
        inplace=True,
    )

    for i in ["index", "p_set", "q_set"]:
        neighbor_loads = neighbor_loads.drop(i, axis=1)

    neighbor_loads.to_sql(
        "egon_etrago_load",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # prepare neighboring stores for etrago tables
    neighbor_stores["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")

    neighbor_stores.carrier.replace(
        {
            "Li_ion": "battery",
            "gas": "CH4",
        },
        inplace=True,
    )
    # Split H2 stores by capacity: <= 1e9 MWh counts as underground storage.
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max <= 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_underground"
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max > 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_overground"

    for i in ["name", "p_set", "q_set", "e_nom_opt", "lifetime"]:
        neighbor_stores = neighbor_stores.drop(i, axis=1)

    neighbor_stores.to_sql(
        "egon_etrago_store",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # prepare neighboring storage_units for etrago tables
    neighbor_storage["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")

    neighbor_storage.carrier.replace(
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
    )

    for i in ["name", "p_nom_opt"]:
        neighbor_storage = neighbor_storage.drop(i, axis=1)

    neighbor_storage.to_sql(
        "egon_etrago_storage",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring loads_t p_sets to etrago tables

    neighbor_loads_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_set"],
        index=neighbor_loads_t.columns,
    )
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
    neighbor_loads_t_etrago["temp_id"] = 1
    # Each component's full time series is stored as one list-valued cell.
    for i in neighbor_loads_t.columns:
        neighbor_loads_t_etrago["p_set"][i] = neighbor_loads_t[
            i
        ].values.tolist()

    neighbor_loads_t_etrago.to_sql(
        "egon_etrago_load_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # writing neighboring generator_t p_max_pu to etrago tables
    neighbor_gens_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_max_pu"],
        index=neighbor_gens_t.columns,
    )
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
    neighbor_gens_t_etrago["temp_id"] = 1
    for i in neighbor_gens_t.columns:
        neighbor_gens_t_etrago["p_max_pu"][i] = neighbor_gens_t[
            i
        ].values.tolist()

    neighbor_gens_t_etrago.to_sql(
        "egon_etrago_generator_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # writing neighboring stores_t e_min_pu to etrago tables
    neighbor_stores_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "e_min_pu"],
        index=neighbor_stores_t.columns,
    )
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
    neighbor_stores_t_etrago["temp_id"] = 1
    for i in neighbor_stores_t.columns:
        neighbor_stores_t_etrago["e_min_pu"][i] = neighbor_stores_t[
            i
        ].values.tolist()

    neighbor_stores_t_etrago.to_sql(
        "egon_etrago_store_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # writing neighboring storage_units inflow to etrago tables
    neighbor_storage_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "inflow"],
        index=neighbor_storage_t.columns,
    )
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
    neighbor_storage_t_etrago["temp_id"] = 1
    for i in neighbor_storage_t.columns:
        neighbor_storage_t_etrago["inflow"][i] = neighbor_storage_t[
            i
        ].values.tolist()

    neighbor_storage_t_etrago.to_sql(
        "egon_etrago_storage_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring lines_t s_max_pu to etrago tables
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t_etrago = pd.DataFrame(
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
        )
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"

        for i in neighbor_lines_t.columns:
            neighbor_lines_t_etrago["s_max_pu"][i] = neighbor_lines_t[
                i
            ].values.tolist()

        neighbor_lines_t_etrago.to_sql(
            "egon_etrago_line_timeseries",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )
968
969
# Skip execution of pypsa-eur-sec by default until optional task is implemented
execute_pypsa_eur_sec = False

# Task tuple handed to the Dataset below; the workflow run is prepended
# only when pypsa-eur-sec execution is enabled.
tasks = (
    (run_pypsa_eur_sec, clean_database, neighbor_reduction)
    if execute_pypsa_eur_sec
    else (clean_database, neighbor_reduction)
)
976
977
978
class PypsaEurSec(Dataset):
    """Dataset registering the pypsa-eur-sec import tasks.

    Runs the module-level ``tasks`` tuple (optionally the workflow run,
    then database cleaning and neighbor reduction) under the name
    ``PypsaEurSec``.
    """

    def __init__(self, dependencies):
        dataset_kwargs = dict(
            name="PypsaEurSec",
            version="0.0.7",
            dependencies=dependencies,
            tasks=tasks,
        )
        super().__init__(**dataset_kwargs)
986