from datetime import datetime
from math import ceil
import os

from sqlalchemy import ARRAY, Column, Float, Integer, String
from sqlalchemy.ext.declarative import declarative_base
import numpy as np
import pandas as pd

from egon.data import db
import egon

Base = declarative_base()


class EgonHeatTimeseries(Base):
    __tablename__ = "egon_heat_timeseries_selected_profiles"
    __table_args__ = {"schema": "demand"}
    zensus_population_id = Column(Integer, primary_key=True)
    building_id = Column(Integer, primary_key=True)
    selected_idp_profiles = Column(ARRAY(Integer))
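
# The ORM table above stores, per residential building, the intra-day heat
# demand profiles selected from the IDP pool: one index into
# demand.egon_heat_idp_pool for each of the 365 days of the year
# (``select()`` below fills it).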


def temperature_classes():
    """Map rounded daily mean temperatures (°C) to one of ten temperature classes."""
    return {
        -20: 1,
        -19: 1,
        -18: 1,
        -17: 1,
        -16: 1,
        -15: 1,
        -14: 2,
        -13: 2,
        -12: 2,
        -11: 2,
        -10: 2,
        -9: 3,
        -8: 3,
        -7: 3,
        -6: 3,
        -5: 3,
        -4: 4,
        -3: 4,
        -2: 4,
        -1: 4,
        0: 4,
        1: 5,
        2: 5,
        3: 5,
        4: 5,
        5: 5,
        6: 6,
        7: 6,
        8: 6,
        9: 6,
        10: 6,
        11: 7,
        12: 7,
        13: 7,
        14: 7,
        15: 7,
        16: 8,
        17: 8,
        18: 8,
        19: 8,
        20: 8,
        21: 9,
        22: 9,
        23: 9,
        24: 9,
        25: 9,
        26: 10,
        27: 10,
        28: 10,
        29: 10,
        30: 10,
        31: 10,
        32: 10,
        33: 10,
        34: 10,
        35: 10,
        36: 10,
        37: 10,
        38: 10,
        39: 10,
        40: 10,
    }
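
# A quick, illustrative use of the mapping above: a rounded daily mean
# temperature of 0 °C falls into class 4, -15 °C into class 1, and anything
# from 26 °C upwards collapses into class 10, e.g.
#
#     temperature_classes()[ceil(-0.4)]  # -> 4
#     temperature_classes()[30]          # -> 10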


def idp_pool_generator():
    """
    Create a list of intra-day heat demand profile (IDP) dataframes, one for
    every combination of household stock (SFH, MFH) and temperature class.

    Returns
    -------
    list
        List of dataframes, each element holding the 24-hour profiles of one
        combination of household stock and temperature class

    """
    path = os.path.join(
        os.getcwd(),
        "data_bundle_egon_data",
        "household_heat_demand_profiles",
        "household_heat_demand_profiles.hdf5",
    )
    index = pd.date_range(datetime(2011, 1, 1, 0), periods=8760, freq="H")

    sfh = pd.read_hdf(path, key="SFH")
    mfh = pd.read_hdf(path, key="MFH")
    temp = pd.read_hdf(path, key="temperature")

    globals()["luebeck_sfh"] = sfh[sfh.filter(like="Luebeck").columns]
    globals()["luebeck_mfh"] = mfh[mfh.filter(like="Luebeck").columns]

    globals()["kassel_sfh"] = sfh[sfh.filter(like="Kassel").columns]
    globals()["kassel_mfh"] = mfh[mfh.filter(like="Kassel").columns]

    globals()["wuerzburg_sfh"] = sfh[sfh.filter(like="Wuerzburg").columns]
    globals()["wuerzburg_mfh"] = mfh[mfh.filter(like="Wuerzburg").columns]

    globals()["chemnitz_sfh"] = sfh[sfh.filter(like="Chemnitz").columns]
    globals()["chemnitz_mfh"] = mfh[mfh.filter(like="Chemnitz").columns]

    # Daily mean temperature per station, broadcast back to the hourly index
    temp_daily = pd.DataFrame()
    for column in temp.columns:
        temp_current = (
            temp[column]
            .resample("D")
            .mean()
            .reindex(temp.index)
            .ffill()
            .bfill()
        )
        temp_current_geom = temp_current
        temp_daily = pd.concat([temp_daily, temp_current_geom], axis=1)

    def round_temperature(station):
        """
        Assign a temperature class to each day of the TRY climate zone of a station.

        Parameters
        ----------
        station : str
            Name of the location

        Returns
        -------
        temp_class : pandas.DataFrame
            Each day assigned to its respective temperature class

        """
        intervals = temperature_classes()
        temperature_rounded = []
        for i in temp_daily.loc[:, station]:
            temperature_rounded.append(ceil(i))
        temperature_interval = []
        for i in temperature_rounded:
            temperature_interval.append(intervals[i])
        temp_class_dic = {f"Class_{station}": temperature_interval}
        temp_class = pd.DataFrame.from_dict(temp_class_dic)
        return temp_class

    temp_class_luebeck = round_temperature("Luebeck")
    temp_class_kassel = round_temperature("Kassel")
    temp_class_wuerzburg = round_temperature("Wuerzburg")
    temp_class_chemnitz = round_temperature("Chemnitz")
    temp_class = pd.concat(
        [
            temp_class_luebeck,
            temp_class_kassel,
            temp_class_wuerzburg,
            temp_class_chemnitz,
        ],
        axis=1,
    )
    temp_class.set_index(index, inplace=True)
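
    # ``temp_class`` now labels every hour of the year with the temperature
    # class of the corresponding day, one column per TRY station
    # (Class_Luebeck, Class_Kassel, Class_Wuerzburg, Class_Chemnitz).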

    def unique_classes(station):
        """
        Collect the temperature classes that actually occur at a station.

        Returns
        -------
        classes : list
            Sorted collection of temperature classes for the location

        """
        classes = []
        for x in temp_class[f"Class_{station}"]:
            if x not in classes:
                classes.append(x)
        classes.sort()
        return classes

    globals()["luebeck_classes"] = unique_classes("Luebeck")
    globals()["kassel_classes"] = unique_classes("Kassel")
    globals()["wuerzburg_classes"] = unique_classes("Wuerzburg")
    globals()["chemnitz_classes"] = unique_classes("Chemnitz")

    stock = ["MFH", "SFH"]
    class_list = [2, 3, 4, 5, 6, 7, 8, 9, 10]
    for s in stock:
        for m in class_list:
            globals()[f"idp_collection_class_{m}_{s}"] = pd.DataFrame(
                index=range(24)
            )

    def splitter(station, household_stock):
        """
        Split a station's hourly profiles into 24-hour blocks and append them
        to the IDP collection of the matching temperature class.

        Parameters
        ----------
        station : str
            Name of the location
        household_stock : str
            SFH or MFH

        Returns
        -------
        None.

        """
        this_classes = globals()["{}_classes".format(station.lower())]
        for classes in this_classes:
            this_iteration = globals()[
                "{}_{}".format(station.lower(), household_stock.lower())
            ].loc[temp_class["Class_{}".format(station)] == classes, :]
            days = list(range(int(len(this_iteration) / 24)))
            for day in days:
                this_day = this_iteration[day * 24 : (day + 1) * 24]
                this_day = this_day.reset_index(drop=True)
                globals()[
                    f"idp_collection_class_{classes}_{household_stock}"
                ] = pd.concat(
                    [
                        globals()[
                            f"idp_collection_class_{classes}_{household_stock}"
                        ],
                        this_day,
                    ],
                    axis=1,
                    ignore_index=True,
                )

    splitter("Luebeck", "SFH")
    splitter("Kassel", "SFH")
    splitter("Wuerzburg", "SFH")
    splitter("Chemnitz", "SFH")
    splitter("Luebeck", "MFH")
    splitter("Kassel", "MFH")
    # Note: no MFH profiles are split for Wuerzburg here.
    splitter("Chemnitz", "MFH")

    def pool_normalize(x):
        """
        Normalize a 24-hour profile to its daily total.

        Parameters
        ----------
        x : pandas.Series
            24-hour profile of the IDP pool

        Returns
        -------
        pandas.Series
            Profile normalized to its daily total (all-zero profiles are
            returned unchanged)

        """
        if x.sum() != 0:
            c = x.sum()
            return x / c
        else:
            return x
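
    # Illustration (hypothetical values) of the helper above:
    #
    #     pool_normalize(pd.Series([1.0, 3.0]))       # -> [0.25, 0.75]
    #     pool_normalize(pd.Series([0.0, 0.0, 0.0]))  # -> unchanged zeros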

    stock = ["MFH", "SFH"]
    class_list = [2, 3, 4, 5, 6, 7, 8, 9, 10]
    for s in stock:
        for m in class_list:
            df_name = globals()[f"idp_collection_class_{m}_{s}"]
            globals()[f"idp_collection_class_{m}_{s}_norm"] = df_name.apply(
                pool_normalize
            )

    return [
        idp_collection_class_2_SFH_norm,
        idp_collection_class_3_SFH_norm,
        idp_collection_class_4_SFH_norm,
        idp_collection_class_5_SFH_norm,
        idp_collection_class_6_SFH_norm,
        idp_collection_class_7_SFH_norm,
        idp_collection_class_8_SFH_norm,
        idp_collection_class_9_SFH_norm,
        idp_collection_class_10_SFH_norm,
        idp_collection_class_2_MFH_norm,
        idp_collection_class_3_MFH_norm,
        idp_collection_class_4_MFH_norm,
        idp_collection_class_5_MFH_norm,
        idp_collection_class_6_MFH_norm,
        idp_collection_class_7_MFH_norm,
        idp_collection_class_8_MFH_norm,
        idp_collection_class_9_MFH_norm,
        idp_collection_class_10_MFH_norm,
    ]
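
# The list returned by ``idp_pool_generator()`` is ordered by household stock
# and temperature class: indices 0-8 hold the SFH pools for classes 2-10,
# indices 9-17 the MFH pools for classes 2-10. ``create()`` below relies on
# this ordering when it looks pools up via ``class_list.index(m) + 9``.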


def create():
    """
    Create a dataframe with all temperature classes, 24-hour profiles and
    household stock and write it to the database.

    Returns
    -------
    idp_df : pandas.DataFrame
        Complete IDP pool, classified by household stock and temperature class

    """
    idp_list = idp_pool_generator()
    stock = ["MFH", "SFH"]
    class_list = [2, 3, 4, 5, 6, 7, 8, 9, 10]
    idp_df = pd.DataFrame(columns=["idp", "house", "temperature_class"])
    for s in stock:
        for m in class_list:
            if s == "SFH":
                i = class_list.index(m)
            if s == "MFH":
                i = class_list.index(m) + 9
            current_pool = idp_list[i]
            idp_df = pd.concat(
                [
                    idp_df,
                    pd.DataFrame(
                        data={
                            "idp": current_pool.transpose().values.tolist(),
                            "house": s,
                            "temperature_class": m,
                        }
                    ),
                ],
                ignore_index=True,
            )
    idp_df = idp_df.reset_index(drop=True)

    idp_df.to_sql(
        "egon_heat_idp_pool",
        con=db.engine(),
        schema="demand",
        if_exists="replace",
        index=True,
        dtype={
            "index": Integer(),
            "idp": ARRAY(Float()),
            "house": String(),
            "temperature_class": Integer(),
        },
    )

    idp_df["idp"] = idp_df.idp.apply(lambda x: np.array(x))

    idp_df.idp = idp_df.idp.apply(lambda x: x.astype(np.float32))

    return idp_df
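
# A minimal sketch of pulling the pool back out of the table written above,
# assuming ``create()`` has already run (illustrative only):
#
#     sample = db.select_dataframe(
#         "SELECT index, house, temperature_class FROM demand.egon_heat_idp_pool",
#         index_col="index",
#     ).head(1)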


def annual_demand_generator(scenario):
    """
    Create a dataframe with the annual residential heat demand and the SFH and
    MFH counts for each census cell.

    Parameters
    ----------
    scenario : str
        Name of the scenario for which the heat demand is selected

    Returns
    -------
    demand_zone : pandas.DataFrame
        Annual demand of all census cells with SFH and MFH counts and the
        associated climate zone

    """

    demand_zone = db.select_dataframe(
        f"""
        SELECT a.demand, a.zensus_population_id, a.scenario, c.climate_zone
        FROM demand.egon_peta_heat a
        JOIN boundaries.egon_map_zensus_climate_zones c
        ON a.zensus_population_id = c.zensus_population_id
        WHERE a.sector = 'residential'
        AND a.scenario = '{scenario}'
        """,
        index_col="zensus_population_id",
    )

    house_count_MFH = db.select_dataframe(
        """
        SELECT cell_id as zensus_population_id, COUNT(*) as number FROM
        (
        SELECT cell_id, COUNT(*), building_id
        FROM demand.egon_household_electricity_profile_of_buildings
        GROUP BY (cell_id, building_id)
        ) a
        WHERE a.count > 1
        GROUP BY cell_id
        """,
        index_col="zensus_population_id",
    )

    house_count_SFH = db.select_dataframe(
        """
        SELECT cell_id as zensus_population_id, COUNT(*) as number FROM
        (
        SELECT cell_id, COUNT(*), building_id
        FROM demand.egon_household_electricity_profile_of_buildings
        GROUP BY (cell_id, building_id)
        ) a
        WHERE a.count = 1
        GROUP BY cell_id
        """,
        index_col="zensus_population_id",
    )

    demand_zone["SFH"] = house_count_SFH.number
    demand_zone["MFH"] = house_count_MFH.number

    demand_zone["SFH"].fillna(0, inplace=True)
    demand_zone["MFH"].fillna(0, inplace=True)

    return demand_zone
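
# The frame returned above is indexed by zensus_population_id and carries the
# columns demand, scenario, climate_zone, SFH and MFH (house counts per cell);
# the layout below is purely illustrative:
#
#     zensus_population_id | demand | scenario | climate_zone | SFH | MFH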


def select():
    """
    Randomly assign intra-day profiles to each day based on the temperature
    class and the household stock count of each census cell.

    Returns
    -------
    None.

    """
    start_profile_selector = datetime.now()

    # Drop old table and re-create it
    engine = db.engine()
    EgonHeatTimeseries.__table__.drop(bind=engine, checkfirst=True)
    EgonHeatTimeseries.__table__.create(bind=engine, checkfirst=True)

    # Select all intra-day-profiles
    idp_df = db.select_dataframe(
        """
        SELECT index, house, temperature_class
        FROM demand.egon_heat_idp_pool
        """,
        index_col="index",
    )

    # Select daily temperature classes per climate zone from table
    temperature_classes = db.select_dataframe(
        """
        SELECT climate_zone, day_of_year, temperature_class
        FROM demand.egon_daily_heat_demand_per_climate_zone
        """
    )

    # Calculate annual heat demand per census cell
    annual_demand = annual_demand_generator(
        scenario=egon.data.config.settings()["egon-data"]["--scenarios"][0]
    )

    # Count number of SFH and MFH per climate zone
    houses_per_climate_zone = (
        annual_demand.groupby("climate_zone")[["SFH", "MFH"]].sum().astype(int)
    )

    # Set random seed to make the selection reproducible
    np.random.seed(
        seed=egon.data.config.settings()["egon-data"]["--random-seed"]
    )

    for station in houses_per_climate_zone.index:
        result_SFH = pd.DataFrame(columns=range(1, 366))
        result_MFH = pd.DataFrame(columns=range(1, 366))

        # Randomly select individual daily demand profiles for the selected
        # climate zone
        for day in range(1, 366):
            t_class = temperature_classes.loc[
                (temperature_classes.climate_zone == station)
                & (temperature_classes.day_of_year == day),
                "temperature_class",
            ].values[0]

            result_SFH[day] = np.random.choice(
                np.array(
                    idp_df[
                        (idp_df.temperature_class == t_class)
                        & (idp_df.house == "SFH")
                    ].index.values
                ),
                houses_per_climate_zone.loc[station, "SFH"],
            )

            result_MFH[day] = np.random.choice(
                np.array(
                    idp_df[
                        (idp_df.temperature_class == t_class)
                        & (idp_df.house == "MFH")
                    ].index.values
                ),
                houses_per_climate_zone.loc[station, "MFH"],
            )

        result_SFH["zensus_population_id"] = (
            annual_demand[annual_demand.climate_zone == station]
            .loc[
                annual_demand[
                    annual_demand.climate_zone == station
                ].index.repeat(
                    annual_demand[
                        annual_demand.climate_zone == station
                    ].SFH.astype(int)
                )
            ]
            .index.values
        )

        result_SFH["building_id"] = (
            db.select_dataframe(
                """
                SELECT cell_id as zensus_population_id, building_id FROM
                (
                SELECT cell_id, COUNT(*), building_id
                FROM demand.egon_household_electricity_profile_of_buildings
                GROUP BY (cell_id, building_id)
                ) a
                WHERE a.count = 1
                """,
                index_col="zensus_population_id",
            )
            .loc[result_SFH["zensus_population_id"].unique(), "building_id"]
            .values
        )

        result_MFH["zensus_population_id"] = (
            annual_demand[annual_demand.climate_zone == station]
            .loc[
                annual_demand[
                    annual_demand.climate_zone == station
                ].index.repeat(
                    annual_demand[
                        annual_demand.climate_zone == station
                    ].MFH.astype(int)
                )
            ]
            .index.values
        )

        result_MFH["building_id"] = (
            db.select_dataframe(
                """
                SELECT cell_id as zensus_population_id, building_id FROM
                (
                SELECT cell_id, COUNT(*), building_id
                FROM demand.egon_household_electricity_profile_of_buildings
                GROUP BY (cell_id, building_id)
                ) a
                WHERE a.count > 1
                """,
                index_col="zensus_population_id",
            )
            .loc[result_MFH["zensus_population_id"].unique(), "building_id"]
            .values
        )

        df_sfh = pd.DataFrame(
            data={
                "selected_idp_profiles": result_SFH[
                    range(1, 366)
                ].values.tolist(),
                "zensus_population_id": (
                    annual_demand[annual_demand.climate_zone == station]
                    .loc[
                        annual_demand[
                            annual_demand.climate_zone == station
                        ].index.repeat(
                            annual_demand[
                                annual_demand.climate_zone == station
                            ].SFH.astype(int)
                        )
                    ]
                    .index.values
                ),
                "building_id": (
                    db.select_dataframe(
                        """
                        SELECT cell_id as zensus_population_id, building_id FROM
                        (
                        SELECT cell_id, COUNT(*), building_id
                        FROM demand.egon_household_electricity_profile_of_buildings
                        GROUP BY (cell_id, building_id)
                        ) a
                        WHERE a.count = 1
                        """,
                        index_col="zensus_population_id",
                    )
                    .loc[
                        result_SFH["zensus_population_id"].unique(),
                        "building_id",
                    ]
                    .values
                ),
            }
        )
        start_sfh = datetime.now()
        df_sfh.set_index(["zensus_population_id", "building_id"]).to_sql(
            EgonHeatTimeseries.__table__.name,
            schema=EgonHeatTimeseries.__table__.schema,
            con=db.engine(),
            if_exists="append",
            chunksize=5000,
            method="multi",
        )
        print(f"SFH insertion for zone {station}:")
        print(datetime.now() - start_sfh)

        df_mfh = pd.DataFrame(
            data={
                "selected_idp_profiles": result_MFH[
                    range(1, 366)
                ].values.tolist(),
                "zensus_population_id": (
                    annual_demand[annual_demand.climate_zone == station]
                    .loc[
                        annual_demand[
                            annual_demand.climate_zone == station
                        ].index.repeat(
                            annual_demand[
                                annual_demand.climate_zone == station
                            ].MFH.astype(int)
                        )
                    ]
                    .index.values
                ),
                "building_id": (
                    db.select_dataframe(
                        """
                        SELECT cell_id as zensus_population_id, building_id FROM
                        (
                        SELECT cell_id, COUNT(*), building_id
                        FROM demand.egon_household_electricity_profile_of_buildings
                        GROUP BY (cell_id, building_id)
                        ) a
                        WHERE a.count > 1
                        """,
                        index_col="zensus_population_id",
                    )
                    .loc[
                        result_MFH["zensus_population_id"].unique(),
                        "building_id",
                    ]
                    .values
                ),
            }
        )

        start_mfh = datetime.now()
        df_mfh.set_index(["zensus_population_id", "building_id"]).to_sql(
            EgonHeatTimeseries.__table__.name,
            schema=EgonHeatTimeseries.__table__.schema,
            con=db.engine(),
            if_exists="append",
            chunksize=5000,
            method="multi",
        )
        print(f"MFH insertion for zone {station}:")
        print(datetime.now() - start_mfh)

    print("Time for overall profile selection:")
    print(datetime.now() - start_profile_selector)
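
# A minimal, hypothetical way to run both steps by hand; in practice these
# functions are presumably triggered by the egon-data workflow rather than
# executed directly:
#
#     if __name__ == "__main__":
#         create()  # build the IDP pool and write demand.egon_heat_idp_pool
#         select()  # pick daily profiles per building and fill
#                   # demand.egon_heat_timeseries_selected_profiles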