Passed
Pull Request — dev (#905)
by
unknown
01:34
created

timeit()   A

Complexity

Conditions 1

Size

Total Lines 15
Code Lines 9

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 9
dl 0
loc 15
rs 9.95
c 0
b 0
f 0
cc 1
nop 1
1
from io import StringIO
2
import csv
3
import time
4
5
from shapely.geometry import Point
6
import geopandas as gpd
7
import numpy as np
8
import pandas as pd
9
10
from egon.data import db
11
12
# Module-level SQLAlchemy engine, created once at import time.
# NOTE(review): this runs on import and needs a reachable database — verify
# that every importer of this module expects that side effect.
engine = db.engine()
13
14
15
def timeit(func):
    """
    Decorator for measuring a function's running time.

    Parameters
    ----------
    func: callable
        Function to wrap.

    Returns
    -------
    callable
        Wrapper that forwards all arguments to `func`, prints the elapsed
        wall-clock time and returns `func`'s result.
    """
    from functools import wraps

    # functools.wraps preserves __name__/__qualname__/__doc__ of the wrapped
    # function; without it every decorated function reports as
    # "measure_time", which breaks introspection and logging.
    @wraps(func)
    def measure_time(*args, **kw):
        start_time = time.time()
        result = func(*args, **kw)
        print(
            "Processing time of %s(): %.2f seconds."
            % (func.__qualname__, time.time() - start_time)
        )
        return result

    return measure_time
30
31
32
def random_point_in_square(geom, tol):
    """
    Generate one random point inside each square geometry.

    Parameters
    ----------
    geom: gpd.Series
        Geometries of squares
    tol: float
        tolerance to square bounds

    Returns
    -------
    points: gpd.Series
        Series of random points, one per input square (crs epsg:3035)
    """
    # Shrink every square by half the tolerance on each side so no point
    # is generated on (or too close to) a cell border.
    bounds = geom.bounds
    half_tol = tol / 2
    x_lo = bounds["minx"] + half_tol
    x_hi = bounds["maxx"] - half_tol
    y_lo = bounds["miny"] + half_tol
    y_hi = bounds["maxy"] - half_tol

    # One uniform draw per square, scaled into the shrunken bounds.
    n_cells = geom.shape[0]
    xs = (x_hi - x_lo) * np.random.rand(n_cells) + x_lo
    ys = (y_hi - y_lo) * np.random.rand(n_cells) + y_lo

    point_series = pd.Series([Point(xy) for xy in zip(xs, ys)])
    return gpd.GeoSeries(point_series, crs="epsg:3035")
62
63
64
# distribute amenities evenly
65
def specific_int_until_sum(s_sum, i_int):
    """
    Build a list of copies of `i_int` summing exactly to `s_sum`.

    Parameters
    ----------
    s_sum: int
        Target sum of the returned list (0 yields an empty list).
    i_int: int
        Chunk value repeated in the list; must be > 0.

    Returns
    -------
    list of int
        `s_sum // i_int` copies of `i_int`, preceded by the remainder
        `s_sum % i_int` when it is non-zero (so the first value, not the
        last, may be smaller than `i_int`).
    """
    remainder = s_sum % i_int
    # Put the (possibly absent) partial chunk first so the rest of the
    # list consists purely of full `i_int` chunks.
    list_i = [remainder] if remainder else []
    list_i += (s_sum // i_int) * [i_int]
    return list_i
72
73
74
def random_ints_until_sum(s_sum, m_max):
    """
    Draw random integers in [1, `m_max`] until they sum to `s_sum`.

    Parameters
    ----------
    s_sum: int
        Target sum; must be >= 0 (0 yields an empty list).
    m_max: int
        Inclusive upper bound for each drawn integer; must be >= 1.

    Returns
    -------
    list of int
        Random integers in [1, m_max] summing exactly to `s_sum`; the
        final draw is clipped so the total never overshoots.
    """
    list_r = []
    while s_sum > 0:
        # np.random.randint's upper bound is exclusive, hence m_max + 1;
        # clip to the remaining budget so the sum lands exactly on s_sum.
        # (The previous `r <= m_max` guard was always true and is dropped.)
        r = min(np.random.randint(1, m_max + 1), s_sum)
        list_r.append(r)
        s_sum -= r
    return list_r
85
86
87
def write_table_to_postgis(df, table, engine, drop=True):
    """
    Append a GeoDataFrame to the PostGIS table described by an ORM class.

    Parameters
    ----------
    df: gpd.GeoDataFrame
        Data to write; columns not declared on `table` are discarded.
    table: declarative ORM class
        Target table; supplies name, schema and column types.
    engine: sqlalchemy.engine.Engine
        Database connection.
    drop: bool
        If True, drop and recreate the table before writing.
    """
    orm_columns = table.__table__.columns

    # Keep only the columns the database table actually declares.
    df = df.loc[:, [column.key for column in orm_columns]]

    if drop:
        table.__table__.drop(bind=engine, checkfirst=True)
        table.__table__.create(bind=engine)

    # Map each column name to its SQLAlchemy type so to_postgis casts
    # the values accordingly.
    dtypes = {name: orm_columns[name].type for name in orm_columns.keys()}

    # Write new buildings incl coord into db
    df.to_postgis(
        name=table.__tablename__,
        con=engine,
        if_exists="append",
        schema=table.__table_args__["schema"],
        dtype=dtypes,
    )
113
114
115
# def write_table_to_postgres(df, table, drop=True):
116
#     """"""
117
#
118
#     # Only take in db table defined columns
119
#     columns = [column.key for column in table.__table__.columns]
120
#     df = df.loc[:, columns]
121
#
122
#     if drop:
123
#         table.__table__.drop(bind=engine, checkfirst=True)
124
#         table.__table__.create(bind=engine)
125
#
126
#     # Write peak loads into db
127
#     with db.session_scope() as session:
128
#         session.bulk_insert_mappings(
129
#             table,
130
#             df.to_dict(orient="records"),
131
#         )
132
133
134
def psql_insert_copy(table, conn, keys, data_iter):
    """
    Insert rows with PostgreSQL's COPY — a `method` callable for
    `pandas.DataFrame.to_sql`.

    Parameters
    ----------
    table : pandas.io.sql.SQLTable
    conn : sqlalchemy.engine.Engine or sqlalchemy.engine.Connection
    keys : list of str
        Column names
    data_iter : Iterable that iterates the values to be inserted
    """
    # Serialise all rows into an in-memory CSV buffer first; COPY then
    # streams the whole buffer in a single round trip.
    buffer = StringIO()
    csv.writer(buffer).writerows(data_iter)
    buffer.seek(0)

    quoted_columns = ", ".join('"{}"'.format(k) for k in keys)
    table_name = (
        "{}.{}".format(table.schema, table.name)
        if table.schema
        else table.name
    )
    sql = "COPY {} ({}) FROM STDIN WITH CSV".format(
        table_name, quoted_columns
    )

    # copy_expert lives on the raw DBAPI cursor, not on the SQLAlchemy
    # connection, so unwrap it first.
    dbapi_conn = conn.connection
    with dbapi_conn.cursor() as cur:
        cur.copy_expert(sql=sql, file=buffer)
162
163
164
def write_table_to_postgres(
    df, db_table, engine, drop=False, index=False, if_exists="append"
):
    """
    Write a DataFrame into a PostgreSQL table via the COPY-based fast path.

    Parameters
    ----------
    df: pd.DataFrame
        Data to write; columns not declared on `db_table` are discarded.
    db_table: declarative ORM class
        Target table; supplies name, schema and column types.
    engine: sqlalchemy.engine.Engine
        Database connection.
    drop: bool
        If True, drop and recreate the table before writing.
    index: bool
        Passed through to `DataFrame.to_sql`.
    if_exists: str
        Passed through to `DataFrame.to_sql`.
    """
    # Restrict the frame to the ORM-declared columns and reuse the same
    # mapping as the dtype argument below.
    dtype_map = {
        column.key: column.type for column in db_table.__table__.columns
    }
    df = df.loc[:, dtype_map.keys()]

    if drop:
        db_table.__table__.drop(bind=engine, checkfirst=True)
        db_table.__table__.create(bind=engine)

    df.to_sql(
        name=db_table.__table__.name,
        schema=db_table.__table__.schema,
        con=engine,
        if_exists=if_exists,
        index=index,
        method=psql_insert_copy,
        dtype=dtype_map,
    )
188