"""The central module containing all code dealing with importing data from
the pypsa-eur-sec scenario parameter creation
"""

from pathlib import Path
from urllib.request import urlretrieve
import json
import os
import tarfile

from shapely.geometry import LineString
import geopandas as gpd
import importlib_resources as resources
import numpy as np
import pandas as pd
import pypsa
import yaml

from egon.data import __path__, db, logger, config
from egon.data.datasets import Dataset
from egon.data.datasets.scenario_parameters import get_sector_parameters
import egon.data.config
import egon.data.subprocess as subproc


def run_pypsa_eur_sec():
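    """Run the PyPSA-eur-sec workflow feeding the eGon100RE scenario.

    Clones PyPSA-eur (v0.4.0), technology-data (v0.3.0) and the openego
    fork of pypsa-eur-sec into ``run-pypsa-eur-sec``, downloads the
    corresponding data bundles from zenodo, adds gurobi to the PyPSA-eur
    conda environment and finally executes the ``Main`` rule of the
    bundled Snakefile via snakemake.
    """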
    cwd = Path(".")
    filepath = cwd / "run-pypsa-eur-sec"
    filepath.mkdir(parents=True, exist_ok=True)

    pypsa_eur_repos = filepath / "pypsa-eur"
    pypsa_eur_repos_data = pypsa_eur_repos / "data"
    technology_data_repos = filepath / "technology-data"
    pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
    pypsa_eur_sec_repos_data = pypsa_eur_sec_repos / "data"

    if not pypsa_eur_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.4.0",
                "https://github.com/PyPSA/pypsa-eur.git",
                pypsa_eur_repos,
            ]
        )

        # subproc.run(
        #     ["git", "checkout", "4e44822514755cdd0289687556547100fba6218b"],
        #     cwd=pypsa_eur_repos,
        # )

        file_to_copy = os.path.join(
            __path__[0], "datasets", "pypsaeursec", "pypsaeur", "Snakefile"
        )

        subproc.run(["cp", file_to_copy, pypsa_eur_repos])

        # Read YAML file
        path_to_env = pypsa_eur_repos / "envs" / "environment.yaml"
        with open(path_to_env, "r") as stream:
            env = yaml.safe_load(stream)

        env["dependencies"].append("gurobi")

        # Write YAML file
        with open(path_to_env, "w", encoding="utf8") as outfile:
            yaml.dump(
                env, outfile, default_flow_style=False, allow_unicode=True
            )

    datafile = "pypsa-eur-data-bundle.tar.xz"
    datapath = pypsa_eur_repos / datafile
    if not datapath.exists():
        urlretrieve(
            f"https://zenodo.org/record/3517935/files/{datafile}", datapath
        )
        tar = tarfile.open(datapath)
        tar.extractall(pypsa_eur_repos_data)

    if not technology_data_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "--branch",
                "v0.3.0",
                "https://github.com/PyPSA/technology-data.git",
                technology_data_repos,
            ]
        )

    if not pypsa_eur_sec_repos.exists():
        subproc.run(
            [
                "git",
                "clone",
                "https://github.com/openego/pypsa-eur-sec.git",
                pypsa_eur_sec_repos,
            ]
        )

    datafile = "pypsa-eur-sec-data-bundle.tar.gz"
    datapath = pypsa_eur_sec_repos_data / datafile
    if not datapath.exists():
        urlretrieve(
            f"https://zenodo.org/record/5824485/files/{datafile}", datapath
        )
        tar = tarfile.open(datapath)
        tar.extractall(pypsa_eur_sec_repos_data)

    with open(filepath / "Snakefile", "w") as snakefile:
        snakefile.write(
            resources.read_text("egon.data.datasets.pypsaeursec", "Snakefile")
        )

    subproc.run(
        [
            "snakemake",
            "-j1",
            "--directory",
            filepath,
            "--snakefile",
            filepath / "Snakefile",
            "--use-conda",
            "--conda-frontend=conda",
            "Main",
        ]
    )


def read_network():
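    """Read the PyPSA-eur-sec network used for the eGon100RE scenario.

    If ``execute_pypsa_eur_sec`` is set, the post-network produced by the
    local pypsa-eur-sec run is read based on the scenario settings in
    ``config_egon.yaml``; otherwise the pre-computed network shipped in
    the eGon data bundle is used.

    Returns
    -------
    pypsa.Network
    """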
    # Set execute_pypsa_eur_sec to False until optional task is implemented
    execute_pypsa_eur_sec = False
    cwd = Path(".")

    if execute_pypsa_eur_sec:
        filepath = cwd / "run-pypsa-eur-sec"
        pypsa_eur_sec_repos = filepath / "pypsa-eur-sec"
        # Read YAML file
        pes_egonconfig = pypsa_eur_sec_repos / "config_egon.yaml"
        with open(pes_egonconfig, "r") as stream:
            data_config = yaml.safe_load(stream)

        simpl = data_config["scenario"]["simpl"][0]
        clusters = data_config["scenario"]["clusters"][0]
        lv = data_config["scenario"]["lv"][0]
        opts = data_config["scenario"]["opts"][0]
        sector_opts = data_config["scenario"]["sector_opts"][0]
        planning_horizons = data_config["scenario"]["planning_horizons"][0]
        file = "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc".format(
            simpl=simpl,
            clusters=clusters,
            opts=opts,
            lv=lv,
            sector_opts=sector_opts,
            planning_horizons=planning_horizons,
        )

        target_file = (
            pypsa_eur_sec_repos
            / "results"
            / data_config["run"]
            / "postnetworks"
            / file
        )

    else:
        target_file = (
            cwd
            / "data_bundle_egon_data"
            / "pypsa_eur_sec"
            / "2022-07-26-egondata-integration"
            / "postnetworks"
            / "elec_s_37_lv2.0__Co2L0-1H-T-H-B-I-dist1_2050.nc"
        )

    return pypsa.Network(str(target_file))


def clean_database():
    """Remove all components abroad for eGon100RE from the database

    Remove all components abroad and their associated time series from
    the database for the scenario 'eGon100RE'.

    Parameters
    ----------
    None

    Returns
    -------
    None

    """
    scn_name = "eGon100RE"

    comp_one_port = ["load", "generator", "store", "storage"]

    # delete existing components and associated timeseries
    for comp in comp_one_port:
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE {comp + "_id"} IN (
                SELECT {comp + "_id"} FROM {"grid.egon_etrago_" + comp}
                WHERE bus IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND scn_name = '{scn_name}'
            );

            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE bus IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND scn_name = '{scn_name}';"""
        )

    comp_2_ports = [
        "line",
        "transformer",
        "link",
    ]

    for comp, id in zip(comp_2_ports, ["line_id", "trafo_id", "link_id"]):
        db.execute_sql(
            f"""
            DELETE FROM {"grid.egon_etrago_" + comp + "_timeseries"}
            WHERE scn_name = '{scn_name}'
            AND {id} IN (
                SELECT {id} FROM {"grid.egon_etrago_" + comp}
                WHERE "bus0" IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
                AND "bus1" IN (
                    SELECT bus_id FROM grid.egon_etrago_bus
                    WHERE country != 'DE'
                    AND scn_name = '{scn_name}')
            );

            DELETE FROM {"grid.egon_etrago_" + comp}
            WHERE scn_name = '{scn_name}'
            AND "bus0" IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            AND "bus1" IN (
                SELECT bus_id FROM grid.egon_etrago_bus
                WHERE country != 'DE'
                AND scn_name = '{scn_name}')
            ;"""
        )

    db.execute_sql(
        "DELETE FROM grid.egon_etrago_bus "
        f"WHERE scn_name = '{scn_name}' "
        "AND country <> 'DE'"
    )


def neighbor_reduction():
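    """Reduce the PyPSA-eur-sec network to Germany's electrical neighbours.

    Drops all buses and branch components outside the wanted countries,
    replaces cut cross-border flows by fixed loads, re-indexes the
    remaining foreign components with eTraGo ids and writes them,
    including their time series, to the ``grid.egon_etrago_*`` tables for
    the eGon100RE scenario.
    """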
    network = read_network()

    network.links.drop("pipe_retrofit", axis="columns", inplace=True)

    wanted_countries = [
        "DE",
        "AT",
        "CH",
        "CZ",
        "PL",
        "SE",
        "NO",
        "DK",
        "GB",
        "NL",
        "BE",
        "FR",
        "LU",
    ]
    foreign_buses = network.buses[
        ~network.buses.index.str.contains("|".join(wanted_countries))
    ]
    network.buses = network.buses.drop(
        network.buses.loc[foreign_buses.index].index
    )

    # drop foreign lines and links from the 2nd row

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            & (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all lines which have at bus1 the bus which is kept
    lines_cb_1 = network.lines[
        (network.lines["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the line's hourly loading
    for i, k in zip(lines_cb_1.bus1.values, lines_cb_1.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_1.carrier[k]

    # select all lines which have at bus0 the bus which is kept
    lines_cb_0 = network.lines[
        (network.lines["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the line's hourly loading
    for i, k in zip(lines_cb_0.bus0.values, lines_cb_0.index):
        network.add(
            "Load",
            "slack_fix " + i + " " + k,
            bus=i,
            p_set=network.lines_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix " + i + " " + k
        ] = lines_cb_0.carrier[k]

    # do the same for links

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            & (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    # select all links which have at bus1 the bus which is kept
    links_cb_1 = network.links[
        (network.links["bus0"].isin(network.buses.index) == False)
    ]

    # create a load at bus1 with the link's hourly loading
    for i, k in zip(links_cb_1.bus1.values, links_cb_1.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p1[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_1.carrier[k]

    # select all links which have at bus0 the bus which is kept
    links_cb_0 = network.links[
        (network.links["bus1"].isin(network.buses.index) == False)
    ]

    # create a load at bus0 with the link's hourly loading
    for i, k in zip(links_cb_0.bus0.values, links_cb_0.index):
        network.add(
            "Load",
            "slack_fix_links " + i + " " + k,
            bus=i,
            p_set=network.links_t.p0[k],
        )
        network.loads.carrier.loc[
            "slack_fix_links " + i + " " + k
        ] = links_cb_0.carrier[k]

    # drop remaining foreign components

    network.lines = network.lines.drop(
        network.lines[
            (network.lines["bus0"].isin(network.buses.index) == False)
            | (network.lines["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.links = network.links.drop(
        network.links[
            (network.links["bus0"].isin(network.buses.index) == False)
            | (network.links["bus1"].isin(network.buses.index) == False)
        ].index
    )

    network.transformers = network.transformers.drop(
        network.transformers[
            (network.transformers["bus0"].isin(network.buses.index) == False)
            | (network.transformers["bus1"].isin(network.buses.index) == False)
        ].index
    )
    network.generators = network.generators.drop(
        network.generators[
            (network.generators["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.loads = network.loads.drop(
        network.loads[
            (network.loads["bus"].isin(network.buses.index) == False)
        ].index
    )

    network.storage_units = network.storage_units.drop(
        network.storage_units[
            (network.storage_units["bus"].isin(network.buses.index) == False)
        ].index
    )

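    # The static component tables were reduced above, but the corresponding
    # time series still carry columns for the dropped components. Remove all
    # *_t columns whose label no longer appears in the static table.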
    components = [
        "loads",
        "generators",
        "lines",
        "buses",
        "transformers",
        "links",
    ]
    for g in components:  # loads_t
        h = g + "_t"
        nw = getattr(network, h)  # network.loads_t
        for i in nw.keys():  # network.loads_t.p
            cols = [
                j
                for j in getattr(nw, i).columns
                if j not in getattr(network, g).index
            ]
            for k in cols:
                del getattr(nw, i)[k]

    # writing components of neighboring countries to etrago tables

    # Set country tag for all buses
    network.buses.country = network.buses.index.str[:2]
    neighbors = network.buses[network.buses.country != "DE"]

    neighbors["new_index"] = (
        db.next_etrago_id("bus") + neighbors.reset_index().index
    )

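    # Every remaining foreign bus gets a consecutive eTraGo bus_id starting
    # at db.next_etrago_id("bus"); the "new_index" column is used below to
    # re-map the bus0/bus1/bus references of all other neighbouring
    # components.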
    # lines, the foreign crossborder lines
    # (without crossborder lines to Germany!)

    neighbor_lines = network.lines[
        network.lines.bus0.isin(neighbors.index)
        & network.lines.bus1.isin(neighbors.index)
    ]
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t = network.lines_t["s_max_pu"][neighbor_lines.index]

    neighbor_lines.reset_index(inplace=True)
    neighbor_lines.bus0 = (
        neighbors.loc[neighbor_lines.bus0, "new_index"].reset_index().new_index
    )
    neighbor_lines.bus1 = (
        neighbors.loc[neighbor_lines.bus1, "new_index"].reset_index().new_index
    )
    neighbor_lines.index += db.next_etrago_id("line")

    if not network.lines_t["s_max_pu"].empty:
        for i in neighbor_lines_t.columns:
            new_index = neighbor_lines[neighbor_lines["name"] == i].index
            neighbor_lines_t.rename(columns={i: new_index[0]}, inplace=True)

    # links
    neighbor_links = network.links[
        network.links.bus0.isin(neighbors.index)
        & network.links.bus1.isin(neighbors.index)
    ]

    neighbor_links.reset_index(inplace=True)
    neighbor_links.bus0 = (
        neighbors.loc[neighbor_links.bus0, "new_index"].reset_index().new_index
    )
    neighbor_links.bus1 = (
        neighbors.loc[neighbor_links.bus1, "new_index"].reset_index().new_index
    )
    neighbor_links.index += db.next_etrago_id("link")

    # generators
    neighbor_gens = network.generators[
        network.generators.bus.isin(neighbors.index)
    ]
    neighbor_gens_t = network.generators_t["p_max_pu"][
        neighbor_gens[
            neighbor_gens.index.isin(network.generators_t["p_max_pu"].columns)
        ].index
    ]

    neighbor_gens.reset_index(inplace=True)
    neighbor_gens.bus = (
        neighbors.loc[neighbor_gens.bus, "new_index"].reset_index().new_index
    )
    neighbor_gens.index += db.next_etrago_id("generator")

    for i in neighbor_gens_t.columns:
        new_index = neighbor_gens[neighbor_gens["Generator"] == i].index
        neighbor_gens_t.rename(columns={i: new_index[0]}, inplace=True)

    # loads

    neighbor_loads = network.loads[network.loads.bus.isin(neighbors.index)]
    neighbor_loads_t_index = neighbor_loads.index[
        neighbor_loads.index.isin(network.loads_t.p_set.columns)
    ]
    neighbor_loads_t = network.loads_t["p_set"][neighbor_loads_t_index]

    neighbor_loads.reset_index(inplace=True)
    neighbor_loads.bus = (
        neighbors.loc[neighbor_loads.bus, "new_index"].reset_index().new_index
    )
    neighbor_loads.index += db.next_etrago_id("load")

    for i in neighbor_loads_t.columns:
        new_index = neighbor_loads[neighbor_loads["Load"] == i].index
        neighbor_loads_t.rename(columns={i: new_index[0]}, inplace=True)

    # stores
    neighbor_stores = network.stores[network.stores.bus.isin(neighbors.index)]
    neighbor_stores_t_index = neighbor_stores.index[
        neighbor_stores.index.isin(network.stores_t.e_min_pu.columns)
    ]
    neighbor_stores_t = network.stores_t["e_min_pu"][neighbor_stores_t_index]

    neighbor_stores.reset_index(inplace=True)
    neighbor_stores.bus = (
        neighbors.loc[neighbor_stores.bus, "new_index"].reset_index().new_index
    )
    neighbor_stores.index += db.next_etrago_id("store")

    for i in neighbor_stores_t.columns:
        new_index = neighbor_stores[neighbor_stores["Store"] == i].index
        neighbor_stores_t.rename(columns={i: new_index[0]}, inplace=True)

    # storage_units
    neighbor_storage = network.storage_units[
        network.storage_units.bus.isin(neighbors.index)
    ]
    neighbor_storage_t_index = neighbor_storage.index[
        neighbor_storage.index.isin(network.storage_units_t.inflow.columns)
    ]
    neighbor_storage_t = network.storage_units_t["inflow"][
        neighbor_storage_t_index
    ]

    neighbor_storage.reset_index(inplace=True)
    neighbor_storage.bus = (
        neighbors.loc[neighbor_storage.bus, "new_index"]
        .reset_index()
        .new_index
    )
    neighbor_storage.index += db.next_etrago_id("storage")

    for i in neighbor_storage_t.columns:
        new_index = neighbor_storage[
            neighbor_storage["StorageUnit"] == i
        ].index
        neighbor_storage_t.rename(columns={i: new_index[0]}, inplace=True)

    # Connect to local database
    engine = db.engine()

    neighbors["scn_name"] = "eGon100RE"
    neighbors.index = neighbors["new_index"]

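    # For buses whose carrier is neither AC nor biogas, the x/y columns are
    # replaced by the coordinates of the AC bus at the same 'location', so
    # that a point geometry can be built for every bus below.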
    # Correct geometry for non AC buses
    carriers = set(neighbors.carrier.to_list())
    carriers = [e for e in carriers if e not in ("AC", "biogas")]
    non_AC_neighbors = pd.DataFrame()
    for c in carriers:
        c_neighbors = neighbors[neighbors.carrier == c].set_index(
            "location", drop=False
        )
        for i in ["x", "y"]:
            c_neighbors = c_neighbors.drop(i, axis=1)
        coordinates = neighbors[neighbors.carrier == "AC"][
            ["location", "x", "y"]
        ].set_index("location")
        c_neighbors = pd.concat([coordinates, c_neighbors], axis=1).set_index(
            "new_index", drop=False
        )
        non_AC_neighbors = pd.concat([non_AC_neighbors, c_neighbors])
    neighbors = pd.concat(
        [neighbors[neighbors.carrier == "AC"], non_AC_neighbors]
    )

    for i in [
        "new_index",
        "control",
        "generator",
        "location",
        "sub_network",
        "unit",
    ]:
        neighbors = neighbors.drop(i, axis=1)

    # Add geometry column
    neighbors = (
        gpd.GeoDataFrame(
            neighbors, geometry=gpd.points_from_xy(neighbors.x, neighbors.y)
        )
        .rename_geometry("geom")
        .set_crs(4326)
    )

    # Unify carrier names
    neighbors.carrier = neighbors.carrier.str.replace(" ", "_")
    neighbors.carrier.replace(
        {
            "gas": "CH4",
            "gas_for_industry": "CH4_for_industry",
            "H2": "H2_grid",
        },
        inplace=True,
    )

    neighbors.to_postgis(
        "egon_etrago_bus",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="bus_id",
    )

    # prepare and write neighboring crossborder lines to etrago tables
    def lines_to_etrago(neighbor_lines=neighbor_lines, scn="eGon100RE"):
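        """Prepare neighbouring cross-border AC lines and write them to the
        grid.egon_etrago_line table for the given scenario.

        Drops PyPSA-only columns, derives the number of cables from
        num_parallel, sets s_nom from s_nom_min, builds a 'topo' geometry
        between the two bus locations and assigns the lifetime of AC EHV
        overhead lines from the scenario parameters.
        """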
        neighbor_lines["scn_name"] = scn
        neighbor_lines["cables"] = 3 * neighbor_lines["num_parallel"].astype(
            int
        )
        neighbor_lines["s_nom"] = neighbor_lines["s_nom_min"]

        for i in [
            "Line",
            "x_pu_eff",
            "r_pu_eff",
            "sub_network",
            "x_pu",
            "r_pu",
            "g_pu",
            "b_pu",
            "s_nom_opt",
        ]:
            neighbor_lines = neighbor_lines.drop(i, axis=1)

        # Define geometry and add to lines dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_lines.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_lines.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_lines.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_lines = (
            gpd.GeoDataFrame(neighbor_lines, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        neighbor_lines["lifetime"] = get_sector_parameters("electricity", scn)[
            "lifetime"
        ]["ac_ehv_overhead_line"]

        neighbor_lines.to_postgis(
            "egon_etrago_line",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )

    for scn in config.settings()["egon-data"]["--scenarios"]:
        lines_to_etrago(neighbor_lines=neighbor_lines, scn=scn)

    def links_to_etrago(neighbor_links, scn="eGon100RE", extendable=True):
        """Prepare and write neighboring crossborder links to eTraGo table

        This function prepares the neighboring crossborder links
        generated by the PyPSA-eur-sec (p-e-s) run by:

        * Deleting the useless columns
        * If extendable is false only (non default case):

          * Replacing p_nom = 0 with the p_nom_opt values (arising
            from the p-e-s optimisation)
          * Setting p_nom_extendable to false

        * Adding geometry to the links: 'geom' and 'topo' columns
        * Changing the names of the carriers to be consistent with
          eGon-data

        The function then inserts the links into the eTraGo table and
        has no return.

        Parameters
        ----------
        neighbor_links : pandas.DataFrame
            Dataframe containing the neighboring crossborder links
        scn : str
            Name of the scenario
        extendable : bool
            Boolean expressing if the links should be extendable or not

        Returns
        -------
        None

        """
        neighbor_links["scn_name"] = scn

        if extendable is True:
            neighbor_links = neighbor_links.drop(
                columns=[
                    "Link",
                    "geometry",
                    "tags",
                    "under_construction",
                    "underground",
                    "underwater_fraction",
                    "bus2",
                    "bus3",
                    "bus4",
                    "efficiency2",
                    "efficiency3",
                    "efficiency4",
                    "lifetime",
                    "p_nom_opt",
                    "pipe_retrofit",
                    "committable",
                    "start_up_cost",
                    "shut_down_cost",
                    "min_up_time",
                    "min_down_time",
                    "up_time_before",
                    "down_time_before",
                    "ramp_limit_up",
                    "ramp_limit_down",
                    "ramp_limit_start_up",
                    "ramp_limit_shut_down",
                ],
                errors="ignore",
            )

        elif extendable is False:
            neighbor_links = neighbor_links.drop(
                columns=[
                    "Link",
                    "geometry",
                    "tags",
                    "under_construction",
                    "underground",
                    "underwater_fraction",
                    "bus2",
                    "bus3",
                    "bus4",
                    "efficiency2",
                    "efficiency3",
                    "efficiency4",
                    "lifetime",
                    "p_nom",
                    "p_nom_extendable",
                    "pipe_retrofit",
                    "committable",
                    "start_up_cost",
                    "shut_down_cost",
                    "min_up_time",
                    "min_down_time",
                    "up_time_before",
                    "down_time_before",
                    "ramp_limit_up",
                    "ramp_limit_start_up",
                    "ramp_limit_down",
                    "ramp_limit_shut_down",
                ],
                errors="ignore",
            )
            neighbor_links = neighbor_links.rename(
                columns={"p_nom_opt": "p_nom"}
            )
            neighbor_links["p_nom_extendable"] = False

        # Define geometry and add to links dataframe as 'topo'
        gdf = gpd.GeoDataFrame(index=neighbor_links.index)
        gdf["geom_bus0"] = neighbors.geom[neighbor_links.bus0].values
        gdf["geom_bus1"] = neighbors.geom[neighbor_links.bus1].values
        gdf["geometry"] = gdf.apply(
            lambda x: LineString([x["geom_bus0"], x["geom_bus1"]]), axis=1
        )

        neighbor_links = (
            gpd.GeoDataFrame(neighbor_links, geometry=gdf["geometry"])
            .rename_geometry("topo")
            .set_crs(4326)
        )

        # Unify carrier names
        neighbor_links.carrier = neighbor_links.carrier.str.replace(" ", "_")

        neighbor_links.carrier.replace(
            {
                "H2_Electrolysis": "power_to_H2",
                "H2_Fuel_Cell": "H2_to_power",
                "H2_pipeline_retrofitted": "H2_retrofit",
                "SMR": "CH4_to_H2",
                "Sabatier": "H2_to_CH4",
                "gas_for_industry": "CH4_for_industry",
                "gas_pipeline": "CH4",
            },
            inplace=True,
        )

        for c in [
            "H2_to_CH4",
            "H2_to_power",
            "power_to_H2",
            "CH4_to_H2",
        ]:
            neighbor_links.loc[
                (neighbor_links.carrier == c),
                "lifetime",
            ] = get_sector_parameters("gas", "eGon100RE")["lifetime"][c]

        neighbor_links.to_postgis(
            "egon_etrago_link",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="link_id",
        )

    non_extendable_links_carriers = [
        "H2 pipeline retrofitted",
        "gas pipeline",
        "biogas to gas",
    ]

    # delete unwanted carriers for eTraGo
    excluded_carriers = ["gas for industry CC", "SMR CC", "biogas to gas"]
    neighbor_links = neighbor_links[
        ~neighbor_links.carrier.isin(excluded_carriers)
    ]

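    # Links whose carrier is listed in non_extendable_links_carriers are
    # written with the optimised capacity (p_nom_opt renamed to p_nom) and
    # p_nom_extendable = False; all remaining links stay extendable.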
    links_to_etrago(
        neighbor_links[
            ~neighbor_links.carrier.isin(non_extendable_links_carriers)
        ],
        "eGon100RE",
    )
    links_to_etrago(
        neighbor_links[
            neighbor_links.carrier.isin(non_extendable_links_carriers)
        ],
        "eGon100RE",
        extendable=False,
    )

    for scn in config.settings()["egon-data"]["--scenarios"]:
        if scn != "eGon100RE":
            links_to_etrago(
                neighbor_links[neighbor_links.carrier == "DC"], scn
            )

    # prepare neighboring generators for etrago tables
    neighbor_gens["scn_name"] = "eGon100RE"
    neighbor_gens["p_nom"] = neighbor_gens["p_nom_opt"]
    neighbor_gens["p_nom_extendable"] = False

    # Unify carrier names
    neighbor_gens.carrier = neighbor_gens.carrier.str.replace(" ", "_")

    neighbor_gens.carrier.replace(
        {
            "onwind": "wind_onshore",
            "ror": "run_of_river",
            "offwind-ac": "wind_offshore",
            "offwind-dc": "wind_offshore",
            "urban_central_solar_thermal": "urban_central_solar_thermal_collector",
            "residential_rural_solar_thermal": "residential_rural_solar_thermal_collector",
            "services_rural_solar_thermal": "services_rural_solar_thermal_collector",
        },
        inplace=True,
    )

    for i in [
        "Generator",
        "weight",
        "lifetime",
        "p_set",
        "q_set",
        "p_nom_opt",
    ]:
        neighbor_gens = neighbor_gens.drop(i, axis=1)

    neighbor_gens.to_sql(
        "egon_etrago_generator",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # prepare neighboring loads for etrago tables
    neighbor_loads["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_loads.carrier = neighbor_loads.carrier.str.replace(" ", "_")

    neighbor_loads.carrier.replace(
        {
            "electricity": "AC",
            "DC": "AC",
            "industry_electricity": "AC",
            "H2_pipeline_retrofitted": "H2_system_boundary",
            "gas_pipeline": "CH4_system_boundary",
            "gas_for_industry": "CH4_for_industry",
        },
        inplace=True,
    )

    neighbor_loads = neighbor_loads.drop(
        columns=["Load"],
        errors="ignore",
    )

    neighbor_loads.to_sql(
        "egon_etrago_load",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # prepare neighboring stores for etrago tables
    neighbor_stores["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_stores.carrier = neighbor_stores.carrier.str.replace(" ", "_")

    neighbor_stores.carrier.replace(
        {
            "Li_ion": "battery",
            "gas": "CH4",
        },
        inplace=True,
    )
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max <= 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_underground"
    neighbor_stores.loc[
        (
            (neighbor_stores.e_nom_max > 1e9)
            & (neighbor_stores.carrier == "H2")
        ),
        "carrier",
    ] = "H2_overground"
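    # H2 stores with a bounded e_nom_max (<= 1e9) are mapped to the
    # H2_underground carrier, all others to H2_overground, so that both get
    # the matching lifetime from the gas scenario parameters below.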

    for i in [
        "Store",
        "p_set",
        "q_set",
        "e_nom_opt",
        "lifetime",
        "e_initial_per_period",
        "e_cyclic_per_period",
    ]:
        neighbor_stores = neighbor_stores.drop(i, axis=1, errors="ignore")

    for c in ["H2_underground", "H2_overground"]:
        neighbor_stores.loc[
            (neighbor_stores.carrier == c),
            "lifetime",
        ] = get_sector_parameters("gas", "eGon100RE")["lifetime"][c]

    neighbor_stores.to_sql(
        "egon_etrago_store",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # prepare neighboring storage_units for etrago tables
    neighbor_storage["scn_name"] = "eGon100RE"

    # Unify carrier names
    neighbor_storage.carrier = neighbor_storage.carrier.str.replace(" ", "_")

    neighbor_storage.carrier.replace(
        {"PHS": "pumped_hydro", "hydro": "reservoir"}, inplace=True
    )

    for i in [
        "StorageUnit",
        "p_nom_opt",
        "state_of_charge_initial_per_period",
        "cyclic_state_of_charge_per_period",
    ]:
        neighbor_storage = neighbor_storage.drop(i, axis=1, errors="ignore")

    neighbor_storage.to_sql(
        "egon_etrago_storage",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

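    # The hourly time series are stored as array columns in the
    # *_timeseries tables: one row per component with scn_name 'eGon100RE',
    # temp_id 1 and the full series converted to a Python list.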
    # writing neighboring loads_t p_sets to etrago tables

    neighbor_loads_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_set"],
        index=neighbor_loads_t.columns,
    )
    neighbor_loads_t_etrago["scn_name"] = "eGon100RE"
    neighbor_loads_t_etrago["temp_id"] = 1
    for i in neighbor_loads_t.columns:
        neighbor_loads_t_etrago["p_set"][i] = neighbor_loads_t[
            i
        ].values.tolist()

    neighbor_loads_t_etrago.to_sql(
        "egon_etrago_load_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="load_id",
    )

    # writing neighboring generator_t p_max_pu to etrago tables
    neighbor_gens_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "p_max_pu"],
        index=neighbor_gens_t.columns,
    )
    neighbor_gens_t_etrago["scn_name"] = "eGon100RE"
    neighbor_gens_t_etrago["temp_id"] = 1
    for i in neighbor_gens_t.columns:
        neighbor_gens_t_etrago["p_max_pu"][i] = neighbor_gens_t[
            i
        ].values.tolist()

    neighbor_gens_t_etrago.to_sql(
        "egon_etrago_generator_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="generator_id",
    )

    # writing neighboring stores_t e_min_pu to etrago tables
    neighbor_stores_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "e_min_pu"],
        index=neighbor_stores_t.columns,
    )
    neighbor_stores_t_etrago["scn_name"] = "eGon100RE"
    neighbor_stores_t_etrago["temp_id"] = 1
    for i in neighbor_stores_t.columns:
        neighbor_stores_t_etrago["e_min_pu"][i] = neighbor_stores_t[
            i
        ].values.tolist()

    neighbor_stores_t_etrago.to_sql(
        "egon_etrago_store_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="store_id",
    )

    # writing neighboring storage_units inflow to etrago tables
    neighbor_storage_t_etrago = pd.DataFrame(
        columns=["scn_name", "temp_id", "inflow"],
        index=neighbor_storage_t.columns,
    )
    neighbor_storage_t_etrago["scn_name"] = "eGon100RE"
    neighbor_storage_t_etrago["temp_id"] = 1
    for i in neighbor_storage_t.columns:
        neighbor_storage_t_etrago["inflow"][i] = neighbor_storage_t[
            i
        ].values.tolist()

    neighbor_storage_t_etrago.to_sql(
        "egon_etrago_storage_timeseries",
        engine,
        schema="grid",
        if_exists="append",
        index=True,
        index_label="storage_id",
    )

    # writing neighboring lines_t s_max_pu to etrago tables
    if not network.lines_t["s_max_pu"].empty:
        neighbor_lines_t_etrago = pd.DataFrame(
            columns=["scn_name", "s_max_pu"], index=neighbor_lines_t.columns
        )
        neighbor_lines_t_etrago["scn_name"] = "eGon100RE"

        for i in neighbor_lines_t.columns:
            neighbor_lines_t_etrago["s_max_pu"][i] = neighbor_lines_t[
                i
            ].values.tolist()

        neighbor_lines_t_etrago.to_sql(
            "egon_etrago_line_timeseries",
            engine,
            schema="grid",
            if_exists="append",
            index=True,
            index_label="line_id",
        )


def overwrite_H2_pipeline_share():
    """Overwrite retrofitted_CH4pipeline-to-H2pipeline_share value

    Overwrite retrofitted_CH4pipeline-to-H2pipeline_share in the
    scenario parameter table if p-e-s is run.
    This function writes to the database and has no return.

    """
    scn_name = "eGon100RE"
    # Select source and target from dataset configuration
    target = egon.data.config.datasets()["pypsa-eur-sec"]["target"]

    n = read_network()

    H2_pipelines = n.links[n.links["carrier"] == "H2 pipeline retrofitted"]
    CH4_pipelines = n.links[n.links["carrier"] == "gas pipeline"]
    H2_pipes_share = np.mean(
        [
            (i / j)
            for i, j in zip(
                H2_pipelines.p_nom_opt.to_list(), CH4_pipelines.p_nom.to_list()
            )
        ]
    )
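    # Mean ratio of the optimised retrofitted H2 pipeline capacity to the
    # original CH4 pipeline capacity across all pipelines of the network.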
    logger.info(
        "retrofitted_CH4pipeline-to-H2pipeline_share = " + str(H2_pipes_share)
    )

    parameters = db.select_dataframe(
        f"""
        SELECT *
        FROM {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        WHERE name = '{scn_name}'
        """
    )

    gas_param = parameters.loc[0, "gas_parameters"]
    gas_param["retrofitted_CH4pipeline-to-H2pipeline_share"] = H2_pipes_share
    gas_param = json.dumps(gas_param)

    # Update data in db
    db.execute_sql(
        f"""
        UPDATE {target['scenario_parameters']['schema']}.{target['scenario_parameters']['table']}
        SET gas_parameters = '{gas_param}'
        WHERE name = '{scn_name}';
        """
    )


# Skip execution of pypsa-eur-sec by default until optional task is implemented
execute_pypsa_eur_sec = False

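# When the optional pypsa-eur-sec run is enabled, the full task chain
# (run_pypsa_eur_sec, clean_database, neighbor_reduction,
# overwrite_H2_pipeline_share) is registered; otherwise only the database
# steps operate on the pre-computed network from the data bundle.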
if execute_pypsa_eur_sec:
    tasks = (
        run_pypsa_eur_sec,
        clean_database,
        neighbor_reduction,
        overwrite_H2_pipeline_share,
    )
else:
    tasks = (
        clean_database,
        neighbor_reduction,
    )


class PypsaEurSec(Dataset):
    def __init__(self, dependencies):
        super().__init__(
            name="PypsaEurSec",
            version="0.0.10",
            dependencies=dependencies,
            tasks=tasks,
        )