| Metric | Value |
| --- | --- |
| Conditions | 19 |
| Total Lines | 337 |
| Code Lines | 167 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
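A minimal sketch of that comment-to-name pattern (function and field names are invented for illustration, not taken from this project):

```python
# Before: a comment announces what the next few lines do.
def summarize(rows):
    # drop rows without an osm id
    valid = [row for row in rows if row.get("osm_id")]
    return sum(row["length"] for row in valid)


# After: the comment becomes the name of the extracted method.
def drop_rows_without_osm_id(rows):
    """Keep only the rows that carry an 'osm_id' key."""
    return [row for row in rows if row.get("osm_id")]


def summarize(rows):
    return sum(row["length"] for row in drop_rows_without_osm_id(rows))
```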
Commonly applied refactorings include Extract Method and, if many parameters or temporary variables are present, Replace Method with Method Object.
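A minimal sketch of Replace Method with Method Object, with all names invented: the long function's parameters and temporaries become fields of a small class, and its phases become private methods.

```python
class AbstractionRun:
    """What used to be parameters and temporaries become fields."""

    def __init__(self, config_database, config_basepath, continue_run=False):
        self.config_database = config_database
        self.config_basepath = config_basepath
        self.continue_run = continue_run
        self.input_data_dir = None
        self.result_dir = None

    def run(self):
        # Each former "phase" of the long function becomes a small method
        # that reads and writes shared fields instead of passing them around.
        self._prepare_directories()
        self._report()

    def _prepare_directories(self):
        self.input_data_dir = self.config_basepath + "/input_data"
        self.result_dir = self.config_basepath + "/results"

    def _report(self):
        print("Abstraction run for", self.config_database)


AbstractionRun("egon-data", "osmTGmod/egon-data").run()
```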
Complex classes like data.datasets.osmtgmod.osmtgmod() often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster.
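As a hedged illustration of Extract Class (class names, fields, and credentials below are placeholders, not project code), fields that share a common prefix move together into their own class:

```python
class ServerSettings:
    """Cohesive component: the fields that shared a common prefix."""

    def __init__(self, host, port, user, password):
        self.host = host
        self.port = port
        self.user = user
        self.password = password


class Abstraction:
    """The original class keeps a single reference to the extracted component."""

    def __init__(self, database, server: ServerSettings):
        self.database = database
        self.server = server


run = Abstraction("egon-data", ServerSettings("localhost", 5432, "egon", "secret"))
```

The flagged method itself is listed below (excerpt).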
```python
from pathlib import Path

# ... (intermediate lines of the module are omitted from this listing)


def osmtgmod(
    config_database="egon-data",
    config_basepath="osmTGmod/egon-data",
    config_continue_run=False,
    filtered_osm_pbf_path_to_file=None,
    docker_db_config=None,
):

    if ("germany-21" in filtered_osm_pbf_path_to_file) | (
        "germany-22" in filtered_osm_pbf_path_to_file
    ):
        """
        Manually add under construction substation expansion in Garenfeld
        to existing substation. (see:)
        """
        print("Manually updating geometry of substation in Garenfeld")
        db.execute_sql(
            """DROP TRIGGER IF EXISTS
            power_ways_update ON power_ways CASCADE """
        )

        db.execute_sql(
            """
            UPDATE power_ways
            SET way = (SELECT ST_SetSRID(ST_AsText(
            '0102000000160000001612D5004A081E4020A8644A35B349407B0ACA'
            '7E27071E405F23EE563BB34940287CB60E0E061E4055A4C2D842B34940352FE29'
            '6EA051E4017940E7B46B34940C0D02346CF051E4042EBE1CB44B34940D67E219A'
            '2F051E40FECF06054AB349407F964A442F031E40C2F441F471B34940A8A544676'
            '1021E40AB9412CA8FB349409C4848881E021E40B7BA08C691B34940B22D4E1430'
            '001E40CE913856BDB34940E2810B122C001E40898CAEAFDBB349402CDAF043480'
            '11E40ED678C32F0B349402FE640E25C041E405A86F21AF1B3494061D525C46F04'
            '1E40ABEF60C892B34940DC2F9FAC18061E400D33D9E495B349401FD7868A71061'
            'E40D2D8A89894B3494083932353F4061E40077360DE88B34940624ED02687071E'
            '404F08782D7CB349405000C5C892091E403EFBDBAF4CB349403DDBFEF04E091E4'
            '0658D7A8846B349405AD5928E72081E405BE8EF4A37B349401612D5004A081E40'
            '20A8644A35B34940'), 4326))
            WHERE name = 'Garenfeld'
            AND id = 24667346
            """
        )

    # ==============================================================
    # Setup logging
    # ==============================================================
    # log = logging.getLogger()
    # log.setLevel(logging.INFO)
    # logformat = logging.Formatter(
    #     "%(asctime)s %(message)s", "%m/%d/%Y %H:%M:%S"
    # )
    # sh = logging.StreamHandler()
    # sh.setFormatter(logformat)
    # log.addHandler(sh)
    # logging.info("\n\n======================\nego_otg\n======================")
    # logging.info("Logging to standard output...")
    # # catch up some log messages from evaluation of command line arguments
    # logging.info("Database: {}".format(config_database))
    # logging.info(
    #     "Path for configuration file and results: {}".format(config_basepath)
    # )
    # ==============================================================
    # read configuration from file and create folder structure
    # ==============================================================
    logging.info(
        (
            "Taking db connection credentials from eGon-data "
            "with respect to the given docker_db_config variable"
        )
    )
    config = configparser.ConfigParser()
    config.read(config_basepath + ".cfg")
    config["postgres_server"]["host"] = docker_db_config["HOST"]
    config["postgres_server"]["port"] = docker_db_config["PORT"]
    config["postgres_server"]["user"] = docker_db_config["POSTGRES_USER"]
    config["postgres_server"]["password"] = docker_db_config[
        "POSTGRES_PASSWORD"
    ]

    # Setting osmTGmod folder structure:
    logging.info("Checking/Creating file directories")
    input_data_dir = os.path.join(config_basepath, "input_data")
    result_dir = os.path.join(config_basepath, "results")
    # Basic folders are created if not existent
    if not os.path.exists(input_data_dir):
        os.makedirs(input_data_dir)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)
    # start logging to file
    # logfile = os.path.join(config_basepath, config_database + ".log")
    # fh = logging.FileHandler(logfile)
    # fh.setFormatter(logformat)
    # log.addHandler(fh)
    # logging.info("Logging to file '{0}' is set up".format(logfile))
    # logging.info(
    #     "Now logging both to standard output and to file '{0}'...".format(
    #         logfile
    #     )
    # )
    logging.info("\n\n======================\nego_otg\n======================")
    # copy config file
    logging.info(
        "Copying configuration file to '{0}'.".format(
            os.path.join(config_basepath, config_database + ".cfg")
        )
    )
    os.system(
        "cp {0} {1}".format(
            config_basepath + ".cfg",
            os.path.join(config_basepath, config_database + ".cfg"),
        )
    )

    # Connects to new Database
    logging.info("Connecting to database {} ...".format(config_database))
    conn = psycopg2.connect(
        host=config["postgres_server"]["host"],
        port=config["postgres_server"]["port"],
        database=config_database,
        user=config["postgres_server"]["user"],
        password=config["postgres_server"]["password"],
    )

    cur = conn.cursor()

    min_voltage = 110000

    if not config_continue_run:
        logging.info("Setting min_voltage...")
        cur.execute(
            """
            UPDATE abstr_values
            SET val_int = %s
            WHERE val_description = 'min_voltage'""",
            (min_voltage,),
        )
        conn.commit()

        logging.info("Setting main_station...")
        cur.execute(
            """
            UPDATE abstr_values
            SET val_int = %s
            WHERE val_description = 'main_station'""",
            (config.getint("abstraction", "main_station"),),
        )
        conn.commit()

        logging.info("Setting graph_dfs...")
        cur.execute(
            """
            UPDATE abstr_values
            SET val_bool = %s
            WHERE val_description = 'graph_dfs'""",
            (config.getboolean("abstraction", "graph_dfs"),),
        )
        conn.commit()

        logging.info("Setting conn_subgraphs...")
        cur.execute(
            """
            UPDATE abstr_values
            SET val_bool = %s
            WHERE val_description = 'conn_subgraphs'""",
            (config.getboolean("abstraction", "conn_subgraphs"),),
        )
        conn.commit()

        logging.info("Setting transfer_busses...")
        cur.execute(
            """
            UPDATE abstr_values
            SET val_bool = %s
            WHERE val_description = 'transfer_busses'""",
            (config.getboolean("abstraction", "transfer_busses"),),
        )
        conn.commit()

    # setting transfer busses
    path_for_transfer_busses = input_data_dir + "/transfer_busses.csv"
    logging.info(
        "Reading transfer busses from file {} ...".format(
            path_for_transfer_busses
        )
    )
    logging.info("Deleting all entries from transfer_busses table ...")
    cur.execute(
        """
        DELETE FROM transfer_busses;
        """
    )
    conn.commit()

    with open(path_for_transfer_busses, "w") as this_file:
        cur.copy_expert(
            """COPY transfer_busses_complete to
            STDOUT WITH CSV HEADER""",
            this_file,
        )
        conn.commit()

    reader = csv.reader(open(path_for_transfer_busses, "r"))
    next(reader, None)  # Skips header
    logging.info("Copying transfer-busses from CSV to database...")
    for row in reader:
        osm_id = str(row[8])
        if osm_id[:1] == "w":
            object_type = "way"
        elif osm_id[:1] == "n":
            object_type = "node"
        else:
            object_type = None
        osm_id_int = int(osm_id[1:])
        center_geom = str(row[3])
        cur.execute(
            """
            INSERT INTO transfer_busses (osm_id, object_type,
            center_geom)
            VALUES (%s, %s, %s);
            """,
            (osm_id_int, object_type, center_geom),
        )
        conn.commit()
    logging.info("All transfer busses imported successfully")

    # Execute power_script
    logging.info(
        (
            "Preparing execution of abstraction script "
            "'sql-scripts/power_script.sql' ..."
        )
    )
    with codecs.open(
        "osmTGmod/sql-scripts/power_script.sql", "r", "utf-8-sig"
    ) as fd:
        sqlfile = fd.read()
    # remove lines starting with "--" (comments), tabulators and empty lines
    # beware: comments in C-like style (such as /* comment */) aren't parsed!
    sqlfile_without_comments = "".join(
        [
            (
                line.lstrip().split("--")[0] + "\n"
                if not line.lstrip().split("--")[0] == ""
                else ""
            )
            for line in sqlfile.split("\n")
        ]
    )

    logging.info("Starting execution of power script...")
    config_continue_run_at = -1

    if not config_continue_run:  # debugging - to be removed
        cur.execute(
            """drop table if exists debug;create table debug
            (step_before int,max_bus_id int, num_bus int,max_branch_id int,
            num_branch int, num_110_bus int, num_220_bus int,
            num_380_bus int)"""
        )
        conn.commit()

    # split sqlfile into commands separated by ";", while not considering
    # symbols for splitting if "escaped" by single quoted strings.
    # Drop everything after last semicolon.
    for i, command in enumerate(
        "'".join(
            [
                segment.replace(";", "§") if i % 2 == 0 else segment
                for i, segment in enumerate(
                    sqlfile_without_comments.split("'")
                )
            ]
        ).split("§")[:-1]
    ):

        if i >= config_continue_run_at:
            logging.info(
                "Executing SQL statement {0}:{1}\n".format(i, command)
            )
            try:
                cur.execute(command)
                conn.commit()
            except:
                logging.exception(
                    (
                        "Exception raised with command {0}. "
                        "Check data and code "
                        "and restart with 'python ego_otg.py {1} {0}'."
                    ).format(i, config_database)
                )
                sys.exit()
            if i > 16:  # debugging - to be removed
                cur.execute(
                    """insert into debug values ({0},
                    (select max(id) from bus_data),(select count(*)
                    from bus_data),(select max(branch_id)
                    from branch_data),(select count(*)
                    from branch_data),(select count(*)
                    from bus_data where voltage = 110000),
                    (select count (*) from bus_data where voltage = 220000),
                    (select count (*)
                    from bus_data where voltage = 380000))""".format(
                        i
                    )
                )
                conn.commit()

    logging.info("Power-script executed successfully.")

    logging.info("Saving Results...")
    cur.execute("SELECT otg_save_results ();")
    conn.commit()

    logging.info("Abstraction process complete!")

    # ==============================================================
    # Write results
    # ==============================================================
    logging.info("Writing results")

    tables = ["bus_data", "branch_data", "dcline_data", "results_metadata"]
    for table in tables:
        logging.info("writing %s..." % table)
        filename = os.path.join(result_dir, table + ".csv")
        logging.info(
            "Writing contents of table {0} to {1}...".format(table, filename)
        )
        query = "SELECT * FROM osmtgmod_results.%s " % (table,)
        outputquery = "COPY ({0}) TO STDOUT WITH DELIMITER \
            ',' CSV HEADER".format(
            query
        )
        with open(filename, encoding="utf-8", mode="w") as fh:
            cur.copy_expert(outputquery, fh)

    logging.info("All tables written!")

    logging.info("EXECUTION FINISHED SUCCESSFULLY!")
```
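Applied to the listing above, the long body could be split along its existing comment boundaries. The sketch below only outlines that target shape: every helper name and signature is a suggestion, not code from the project, and the bodies are left as documented stubs.

```python
def patch_garenfeld_substation(pbf_path):
    """Manually merge the Garenfeld substation expansion (Germany extracts only)."""


def read_configuration(config_basepath, docker_db_config):
    """Read <config_basepath>.cfg and override the server section from docker_db_config."""


def prepare_directories(config_basepath):
    """Create input_data/ and results/ below config_basepath and return both paths."""
    return config_basepath + "/input_data", config_basepath + "/results"


def connect_to_database(config, config_database):
    """Open the psycopg2 connection used by all later steps."""


def set_abstraction_parameters(conn, config, min_voltage=110000):
    """Write min_voltage, main_station, graph_dfs, ... into abstr_values."""


def import_transfer_busses(conn, input_data_dir):
    """Dump transfer_busses_complete to CSV and re-import it row by row."""


def execute_power_script(conn, config_database, continue_run_at=-1):
    """Strip comments from power_script.sql and run it statement by statement."""


def write_results(conn, result_dir):
    """Copy bus_data, branch_data, dcline_data and results_metadata to CSV files."""


def osmtgmod(
    config_database="egon-data",
    config_basepath="osmTGmod/egon-data",
    config_continue_run=False,
    filtered_osm_pbf_path_to_file=None,
    docker_db_config=None,
):
    patch_garenfeld_substation(filtered_osm_pbf_path_to_file)
    config = read_configuration(config_basepath, docker_db_config)
    input_data_dir, result_dir = prepare_directories(config_basepath)
    conn = connect_to_database(config, config_database)
    if not config_continue_run:
        set_abstraction_parameters(conn, config)
    import_transfer_busses(conn, input_data_dir)
    execute_power_script(conn, config_database)
    write_results(conn, result_dir)
```

Each helper can then be named, tested, and reviewed on its own, which is exactly what the condition count and line count at the top of this report are flagging.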