Total Complexity | 102
Total Lines | 557
Duplicated Lines | 98.03 %
Changes | 0
Duplicate code is one of the most pungent code smells. A commonly used rule of thumb is to restructure code once it is duplicated in three or more places.
Common duplication problems and their corresponding solutions are:

- Duplicated code in the same class: apply Extract Method and call the shared method from every place that needs it, as sketched below.
- Duplicated code in two sibling subclasses: apply Extract Method in both, then Pull Up Method into the common superclass.
- Duplicated code in two unrelated classes: apply Extract Class and let both classes delegate to the new component.
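As a concrete illustration of the first case, the near-identical blocks in reports.spaceload.on_get that strip a local datetime parameter, parse it, and shift it to UTC could share one extracted method. The following is a minimal sketch of the optional-parameter variant, with a hypothetical helper name and the error key left to the caller; it is not part of the existing code.

```python
import falcon
from datetime import datetime, timedelta, timezone


def parse_local_datetime_to_utc(value, timezone_offset, error_description):
    """Parse a 'YYYY-MM-DDTHH:MM:SS' local string and shift it to UTC.

    Returns None for a missing/empty value (the optional base-period case);
    raises HTTP 400 with the caller's error key when parsing fails.
    """
    if value is None or len(str.strip(value)) == 0:
        return None
    try:
        return (datetime.strptime(str.strip(value), '%Y-%m-%dT%H:%M:%S')
                .replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset))
    except ValueError:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description=error_description)


# Each duplicated block then collapses to a single call, for example:
# base_start_datetime_utc = parse_local_datetime_to_utc(
#     base_start_datetime_local, timezone_offset, 'API.INVALID_BASE_PERIOD_START_DATETIME')
```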
Complex classes like reports.spaceload often do a lot of different things. To break such a class down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for fields and methods that share the same prefixes or suffixes.

Once you have determined which fields belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster.
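In reports.spaceload, the base_* and reporting_* prefixes are exactly that kind of signal: each period's start and end travel together through validation, querying, and aggregation. A minimal Extract Class sketch, assuming a hypothetical ReportPeriod name that is not part of the existing code:

```python
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class ReportPeriod:
    """Groups the fields that currently share the base_/reporting_ prefixes."""
    start_datetime_utc: Optional[datetime]
    end_datetime_utc: Optional[datetime]

    def is_complete(self):
        # both boundaries were supplied by the caller
        return self.start_datetime_utc is not None and self.end_datetime_utc is not None

    def is_valid(self):
        # mirrors the existing "start >= end" checks in on_get
        return self.is_complete() and self.start_datetime_utc < self.end_datetime_utc


# on_get could then carry two ReportPeriod instances instead of four loose variables:
# base_period = ReportPeriod(base_start_datetime_utc, base_end_datetime_utc)
# reporting_period = ReportPeriod(reporting_start_datetime_utc, reporting_end_datetime_utc)
```

The full reports.spaceload listing flagged by the duplication check follows.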
import falcon
import simplejson as json
import mysql.connector
import config
from datetime import datetime, timedelta, timezone
from core import utilities
from decimal import Decimal
import excelexporters.spaceload


class Reporting:
    @staticmethod
    def __init__():
        pass

    @staticmethod
    def on_options(req, resp):
        resp.status = falcon.HTTP_200

    ####################################################################################################################
    # PROCEDURES
    # Step 1: valid parameters
    # Step 2: query the space
    # Step 3: query energy categories
    # Step 4: query associated sensors
    # Step 5: query associated points
    # Step 6: query base period energy input
    # Step 7: query reporting period energy input
    # Step 8: query tariff data
    # Step 9: query associated sensors and points data
    # Step 10: construct the report
    ####################################################################################################################
    @staticmethod
    def on_get(req, resp):
        print(req.params)
        space_id = req.params.get('spaceid')
        period_type = req.params.get('periodtype')
        base_start_datetime_local = req.params.get('baseperiodstartdatetime')
        base_end_datetime_local = req.params.get('baseperiodenddatetime')
        reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')

        ################################################################################################################
        # Step 1: valid parameters
        ################################################################################################################
        if space_id is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_SPACE_ID')
        else:
            space_id = str.strip(space_id)
            if not space_id.isdigit() or int(space_id) <= 0:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_SPACE_ID')

        if period_type is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')
        else:
            period_type = str.strip(period_type)
            if period_type not in ['hourly', 'daily', 'monthly', 'yearly']:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')

        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        base_start_datetime_utc = None
        if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
            base_start_datetime_local = str.strip(base_start_datetime_local)
            try:
                base_start_datetime_utc = datetime.strptime(base_start_datetime_local,
                                                            '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_START_DATETIME")

        base_end_datetime_utc = None
        if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
            base_end_datetime_local = str.strip(base_end_datetime_local)
            try:
                base_end_datetime_utc = datetime.strptime(base_end_datetime_local,
                                                          '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_END_DATETIME")

        if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
                base_start_datetime_utc >= base_end_datetime_utc:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_BASE_PERIOD_END_DATETIME')

        if reporting_start_datetime_local is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        else:
            reporting_start_datetime_local = str.strip(reporting_start_datetime_local)
            try:
                reporting_start_datetime_utc = datetime.strptime(reporting_start_datetime_local,
                                                                 '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_START_DATETIME")

        if reporting_end_datetime_local is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        else:
            reporting_end_datetime_local = str.strip(reporting_end_datetime_local)
            try:
                reporting_end_datetime_utc = datetime.strptime(reporting_end_datetime_local,
                                                               '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_END_DATETIME")

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        ################################################################################################################
        # Step 2: query the space
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_energy = mysql.connector.connect(**config.myems_energy_db)
        cursor_energy = cnx_energy.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        cursor_system.execute(" SELECT id, name, area, cost_center_id "
                              " FROM tbl_spaces "
                              " WHERE id = %s ", (space_id,))
        row_space = cursor_system.fetchone()
        if row_space is None:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.disconnect()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.disconnect()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND', description='API.SPACE_NOT_FOUND')

        space = dict()
        space['id'] = row_space[0]
        space['name'] = row_space[1]
        space['area'] = row_space[2]
        space['cost_center_id'] = row_space[3]

        ################################################################################################################
        # Step 3: query energy categories
        ################################################################################################################
        energy_category_set = set()
        # query energy categories in base period
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_space_input_category_hourly "
                              " WHERE space_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (space['id'], base_start_datetime_utc, base_end_datetime_utc))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

        # query energy categories in reporting period
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_space_input_category_hourly "
                              " WHERE space_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (space['id'], reporting_start_datetime_utc, reporting_end_datetime_utc))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

        # query all energy categories in base period and reporting period
        cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                              " FROM tbl_energy_categories "
                              " ORDER BY id ", )
        rows_energy_categories = cursor_system.fetchall()
        if rows_energy_categories is None or len(rows_energy_categories) == 0:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.disconnect()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.disconnect()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.ENERGY_CATEGORY_NOT_FOUND')
        energy_category_dict = dict()
        for row_energy_category in rows_energy_categories:
            if row_energy_category[0] in energy_category_set:
                energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                                "unit_of_measure": row_energy_category[2],
                                                                "kgce": row_energy_category[3],
                                                                "kgco2e": row_energy_category[4]}

        ################################################################################################################
        # Step 4: query associated sensors
        ################################################################################################################
        point_list = list()
        cursor_system.execute(" SELECT po.id, po.name, po.units, po.object_type "
                              " FROM tbl_spaces sp, tbl_sensors se, tbl_spaces_sensors spse, "
                              " tbl_points po, tbl_sensors_points sepo "
                              " WHERE sp.id = %s AND sp.id = spse.space_id AND spse.sensor_id = se.id "
                              " AND se.id = sepo.sensor_id AND sepo.point_id = po.id "
                              " ORDER BY po.id ", (space['id'], ))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 5: query associated points
        ################################################################################################################
        cursor_system.execute(" SELECT po.id, po.name, po.units, po.object_type "
                              " FROM tbl_spaces sp, tbl_spaces_points sppo, tbl_points po "
                              " WHERE sp.id = %s AND sp.id = sppo.space_id AND sppo.point_id = po.id "
                              " ORDER BY po.id ", (space['id'], ))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 6: query base period energy input
        ################################################################################################################
        base = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                base[energy_category_id] = dict()
                base[energy_category_id]['timestamps'] = list()
                base[energy_category_id]['sub_averages'] = list()
                base[energy_category_id]['sub_maximums'] = list()
                base[energy_category_id]['average'] = None
                base[energy_category_id]['maximum'] = None
                base[energy_category_id]['factor'] = None

                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM tbl_space_input_category_hourly "
                                      " WHERE space_id = %s "
                                      " AND energy_category_id = %s "
                                      " AND start_datetime_utc >= %s "
                                      " AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (space['id'],
                                       energy_category_id,
                                       base_start_datetime_utc,
                                       base_end_datetime_utc))
                rows_space_hourly = cursor_energy.fetchall()

                rows_space_periodically, \
                    base[energy_category_id]['average'], \
                    base[energy_category_id]['maximum'] = \
                    utilities.averaging_hourly_data_by_period(rows_space_hourly,
                                                              base_start_datetime_utc,
                                                              base_end_datetime_utc,
                                                              period_type)
                base[energy_category_id]['factor'] = \
                    (base[energy_category_id]['average'] / base[energy_category_id]['maximum']
                     if (base[energy_category_id]['average'] is not None and
                         base[energy_category_id]['maximum'] is not None and
                         base[energy_category_id]['maximum'] > Decimal(0.0))
                     else None)

                for row_space_periodically in rows_space_periodically:
                    current_datetime_local = row_space_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.strftime('%Y-%m')
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.strftime('%Y')

                    base[energy_category_id]['timestamps'].append(current_datetime)
                    base[energy_category_id]['sub_averages'].append(row_space_periodically[1])
                    base[energy_category_id]['sub_maximums'].append(row_space_periodically[2])

        ################################################################################################################
        # Step 7: query reporting period energy input
        ################################################################################################################
        reporting = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                reporting[energy_category_id] = dict()
                reporting[energy_category_id]['timestamps'] = list()
                reporting[energy_category_id]['sub_averages'] = list()
                reporting[energy_category_id]['sub_maximums'] = list()
                reporting[energy_category_id]['average'] = None
                reporting[energy_category_id]['maximum'] = None
                reporting[energy_category_id]['factor'] = None

                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM tbl_space_input_category_hourly "
                                      " WHERE space_id = %s "
                                      " AND energy_category_id = %s "
                                      " AND start_datetime_utc >= %s "
                                      " AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (space['id'],
                                       energy_category_id,
                                       reporting_start_datetime_utc,
                                       reporting_end_datetime_utc))
                rows_space_hourly = cursor_energy.fetchall()

                rows_space_periodically, \
                    reporting[energy_category_id]['average'], \
                    reporting[energy_category_id]['maximum'] = \
                    utilities.averaging_hourly_data_by_period(rows_space_hourly,
                                                              reporting_start_datetime_utc,
                                                              reporting_end_datetime_utc,
                                                              period_type)
                reporting[energy_category_id]['factor'] = \
                    (reporting[energy_category_id]['average'] / reporting[energy_category_id]['maximum']
                     if (reporting[energy_category_id]['average'] is not None and
                         reporting[energy_category_id]['maximum'] is not None and
                         reporting[energy_category_id]['maximum'] > Decimal(0.0))
                     else None)

                for row_space_periodically in rows_space_periodically:
                    current_datetime_local = row_space_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.strftime('%Y-%m')
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.strftime('%Y')

                    reporting[energy_category_id]['timestamps'].append(current_datetime)
                    reporting[energy_category_id]['sub_averages'].append(row_space_periodically[1])
                    reporting[energy_category_id]['sub_maximums'].append(row_space_periodically[2])

        ################################################################################################################
        # Step 8: query tariff data
        ################################################################################################################
        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                energy_category_tariff_dict = utilities.get_energy_category_tariffs(space['cost_center_id'],
                                                                                    energy_category_id,
                                                                                    reporting_start_datetime_utc,
                                                                                    reporting_end_datetime_utc)
                tariff_timestamp_list = list()
                tariff_value_list = list()
                for k, v in energy_category_tariff_dict.items():
                    # convert k from utc to local
                    k = k + timedelta(minutes=timezone_offset)
                    tariff_timestamp_list.append(k.isoformat()[0:19])
                    tariff_value_list.append(v)

                parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name'])
                parameters_data['timestamps'].append(tariff_timestamp_list)
                parameters_data['values'].append(tariff_value_list)

        ################################################################################################################
        # Step 9: query associated sensors and points data
        ################################################################################################################
        for point in point_list:
            point_values = []
            point_timestamps = []
            if point['object_type'] == 'ANALOG_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_analog_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                        point_timestamps.append(current_datetime)
                        point_values.append(row[1])

            elif point['object_type'] == 'ENERGY_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_energy_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                        point_timestamps.append(current_datetime)
                        point_values.append(row[1])
            elif point['object_type'] == 'DIGITAL_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_digital_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                        point_timestamps.append(current_datetime)
                        point_values.append(row[1])

            parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
            parameters_data['timestamps'].append(point_timestamps)
            parameters_data['values'].append(point_values)

        ################################################################################################################
        # Step 10: construct the report
        ################################################################################################################
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.disconnect()

        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.disconnect()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.disconnect()

        result = dict()

        result['space'] = dict()
        result['space']['name'] = space['name']
        result['space']['area'] = space['area']

        result['base_period'] = dict()
        result['base_period']['names'] = list()
        result['base_period']['units'] = list()
        result['base_period']['timestamps'] = list()
        result['base_period']['sub_averages'] = list()
        result['base_period']['sub_maximums'] = list()
        result['base_period']['averages'] = list()
        result['base_period']['maximums'] = list()
        result['base_period']['factors'] = list()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                result['base_period']['names'].append(energy_category_dict[energy_category_id]['name'])
                result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
                result['base_period']['timestamps'].append(base[energy_category_id]['timestamps'])
                result['base_period']['sub_averages'].append(base[energy_category_id]['sub_averages'])
                result['base_period']['sub_maximums'].append(base[energy_category_id]['sub_maximums'])
                result['base_period']['averages'].append(base[energy_category_id]['average'])
                result['base_period']['maximums'].append(base[energy_category_id]['maximum'])
                result['base_period']['factors'].append(base[energy_category_id]['factor'])

        result['reporting_period'] = dict()
        result['reporting_period']['names'] = list()
        result['reporting_period']['energy_category_ids'] = list()
        result['reporting_period']['units'] = list()
        result['reporting_period']['timestamps'] = list()
        result['reporting_period']['sub_averages'] = list()
        result['reporting_period']['sub_maximums'] = list()
        result['reporting_period']['averages'] = list()
        result['reporting_period']['averages_per_unit_area'] = list()
        result['reporting_period']['averages_increment_rate'] = list()
        result['reporting_period']['maximums'] = list()
        result['reporting_period']['maximums_per_unit_area'] = list()
        result['reporting_period']['maximums_increment_rate'] = list()
        result['reporting_period']['factors'] = list()
        result['reporting_period']['factors_increment_rate'] = list()

        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name'])
                result['reporting_period']['energy_category_ids'].append(energy_category_id)
                result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
                result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps'])
                result['reporting_period']['sub_averages'].append(reporting[energy_category_id]['sub_averages'])
                result['reporting_period']['sub_maximums'].append(reporting[energy_category_id]['sub_maximums'])
                result['reporting_period']['averages'].append(reporting[energy_category_id]['average'])
                result['reporting_period']['averages_per_unit_area'].append(
                    reporting[energy_category_id]['average'] / space['area']
                    if reporting[energy_category_id]['average'] is not None and
                    space['area'] is not None and
                    space['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['averages_increment_rate'].append(
                    (reporting[energy_category_id]['average'] - base[energy_category_id]['average']) /
                    base[energy_category_id]['average'] if (base[energy_category_id]['average'] is not None and
                                                            base[energy_category_id]['average'] > Decimal(0.0))
                    else None)
                result['reporting_period']['maximums'].append(reporting[energy_category_id]['maximum'])
                result['reporting_period']['maximums_increment_rate'].append(
                    (reporting[energy_category_id]['maximum'] - base[energy_category_id]['maximum']) /
                    base[energy_category_id]['maximum'] if (base[energy_category_id]['maximum'] is not None and
                                                            base[energy_category_id]['maximum'] > Decimal(0.0))
                    else None)
                result['reporting_period']['maximums_per_unit_area'].append(
                    reporting[energy_category_id]['maximum'] / space['area']
                    if reporting[energy_category_id]['maximum'] is not None and
                    space['area'] is not None and
                    space['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['factors'].append(reporting[energy_category_id]['factor'])
                result['reporting_period']['factors_increment_rate'].append(
                    (reporting[energy_category_id]['factor'] - base[energy_category_id]['factor']) /
                    base[energy_category_id]['factor'] if (base[energy_category_id]['factor'] is not None and
                                                           base[energy_category_id]['factor'] > Decimal(0.0))
                    else None)

        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }
        # export result to Excel file and then encode the file to base64 string
        result['excel_bytes_base64'] = excelexporters.spaceload.export(result,
                                                                       space['name'],
                                                                       reporting_start_datetime_local,
                                                                       reporting_end_datetime_local,
                                                                       period_type)

        resp.body = json.dumps(result)