Total Complexity | 113
Total Lines | 676
Duplicated Lines | 98.08 %
Changes | 0
Duplicate code is one of the most pungent code smells. A common rule of thumb is to restructure code once it is duplicated in three or more places.
Common duplication problems and their corresponding solutions are:
Complex classes like reports.microgridreportingcarbon often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate and is often faster.
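As an illustration of Extract Class on this handler: the base_period_*/reporting_period_* request parameters share a prefix and are always parsed and validated together, so they are one candidate component. The sketch below is only illustrative; the names ReportPeriod and parse_local are hypothetical and do not appear in the module listed further down.

# Minimal Extract Class sketch (hypothetical ReportPeriod class, not part of the module below).
from datetime import datetime, timedelta, timezone


class ReportPeriod:
    """Holds one start/end datetime pair in UTC and the validation both periods repeat."""

    def __init__(self, start_datetime_utc, end_datetime_utc):
        if start_datetime_utc >= end_datetime_utc:
            raise ValueError('end datetime must be after start datetime')
        self.start_datetime_utc = start_datetime_utc
        self.end_datetime_utc = end_datetime_utc

    @classmethod
    def parse_local(cls, start_local, end_local, timezone_offset_minutes):
        """Parse 'YYYY-MM-DDTHH:MM:SS' local strings and convert both to UTC."""
        offset = timedelta(minutes=timezone_offset_minutes)
        start = datetime.strptime(start_local, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - offset
        end = datetime.strptime(end_local, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - offset
        return cls(start, end)


# usage sketch: one object per period instead of four loose variables
reporting_period = ReportPeriod.parse_local('2023-01-01T00:00:00', '2023-02-01T00:00:00', 480)
print(reporting_period.start_datetime_utc, reporting_period.end_datetime_utc)

The duplicated source file follows.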
import re
from datetime import datetime, timedelta, timezone
from decimal import Decimal
import falcon
import mysql.connector
import simplejson as json
import config
import excelexporters.microgridreportingcarbon
from core import utilities
from core.useractivity import access_control, api_key_control


class Reporting:
    def __init__(self):
        """Initializes Class"""
        pass

    @staticmethod
    def on_options(req, resp):
        _ = req
        resp.status = falcon.HTTP_200

    ####################################################################################################
    # PROCEDURES
    # Step 1: validate parameters
    # Step 2: query the microgrid
    # Step 3: query associated batteries
    # Step 4: query associated ev chargers
    # Step 5: query associated generators
    # Step 6: query associated grids
    # Step 7: query associated heat pumps
    # Step 8: query associated loads
    # Step 9: query associated photovoltaics
    # Step 10: query associated power conversion systems
    # Step 11: query associated sensors
    # Step 12: query associated meters data
    # Step 13: query associated points data
    # Step 14: construct the report
    ####################################################################################################
    @staticmethod
    def on_get(req, resp):
        if 'API-KEY' not in req.headers or \
                not isinstance(req.headers['API-KEY'], str) or \
                len(str.strip(req.headers['API-KEY'])) == 0:
            access_control(req)
        else:
            api_key_control(req)
        print(req.params)
        # this procedure accepts either a microgrid id or
        # a microgrid uuid to identify a microgrid
        microgrid_id = req.params.get('id')
        microgrid_uuid = req.params.get('uuid')
        period_type = req.params.get('periodtype')
        base_period_start_datetime_local = req.params.get('baseperiodstartdatetime')
        base_period_end_datetime_local = req.params.get('baseperiodenddatetime')
        reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime')
        language = req.params.get('language')
        quick_mode = req.params.get('quickmode')

        ################################################################################################
        # Step 1: validate parameters
        ################################################################################################
        if microgrid_id is None and microgrid_uuid is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_MICROGRID_ID')

        if microgrid_id is not None:
            microgrid_id = str.strip(microgrid_id)
            if not microgrid_id.isdigit() or int(microgrid_id) <= 0:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_MICROGRID_ID')

        if microgrid_uuid is not None:
            regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
            match = regex.match(str.strip(microgrid_uuid))
            if not bool(match):
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_MICROGRID_UUID')

        if period_type is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_PERIOD_TYPE')
        else:
            period_type = str.strip(period_type)
            if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_PERIOD_TYPE')

        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        base_start_datetime_utc = None
        if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0:
            base_period_start_datetime_local = str.strip(base_period_start_datetime_local)
            try:
                base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_START_DATETIME")
            base_start_datetime_utc = \
                base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
            if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30:
                base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0)
            else:
                base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0)

        base_end_datetime_utc = None
        if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0:
            base_period_end_datetime_local = str.strip(base_period_end_datetime_local)
            try:
                base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_END_DATETIME")
            base_end_datetime_utc = \
                base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)

        if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
                base_start_datetime_utc >= base_end_datetime_utc:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_BASE_PERIOD_END_DATETIME')

        if reporting_period_start_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        else:
            reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local)
            try:
                reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local,
                                                                 '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
            reporting_start_datetime_utc = \
                reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
            if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0)
            else:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0)

        if reporting_period_end_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        else:
            reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local)
            try:
                reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local,
                                                               '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
            reporting_end_datetime_utc = reporting_end_datetime_utc.replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        # if quick mode is turned on, do not return parameters data or the Excel file
        is_quick_mode = False
        if quick_mode is not None and \
                len(str.strip(quick_mode)) > 0 and \
                str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'):
            is_quick_mode = True

        trans = utilities.get_translation(language)
        trans.install()
        _ = trans.gettext

        ################################################################################################
        # Step 2: query the microgrid
        ################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        cnx_carbon = mysql.connector.connect(**config.myems_carbon_db)
        cursor_carbon = cnx_carbon.cursor()

        # query all contacts in system
        query = (" SELECT id, name, uuid "
                 " FROM tbl_contacts ")
        cursor_system.execute(query)
        rows_contacts = cursor_system.fetchall()

        contact_dict = dict()
        if rows_contacts is not None and len(rows_contacts) > 0:
            for row in rows_contacts:
                contact_dict[row[0]] = {"id": row[0],
                                        "name": row[1],
                                        "uuid": row[2]}
        # query all cost centers in system
        query = (" SELECT id, name, uuid "
                 " FROM tbl_cost_centers ")
        cursor_system.execute(query)
        rows_cost_centers = cursor_system.fetchall()

        cost_center_dict = dict()
        if rows_cost_centers is not None and len(rows_cost_centers) > 0:
            for row in rows_cost_centers:
                cost_center_dict[row[0]] = {"id": row[0],
                                            "name": row[1],
                                            "uuid": row[2]}

        # query all energy categories in system
        cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                              " FROM tbl_energy_categories "
                              " ORDER BY id ")
        rows_energy_categories = cursor_system.fetchall()
        if rows_energy_categories is None or len(rows_energy_categories) == 0:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.close()
            raise falcon.HTTPError(status=falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.ENERGY_CATEGORY_NOT_FOUND')
        energy_category_dict = dict()
        for row_energy_category in rows_energy_categories:
            energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                            "unit_of_measure": row_energy_category[2],
                                                            "kgce": row_energy_category[3],
                                                            "kgco2e": row_energy_category[4]}

        if microgrid_id is not None:
            query = (" SELECT id, name, uuid, "
                     " address, postal_code, latitude, longitude, rated_capacity, rated_power, "
                     " contact_id, cost_center_id "
                     " FROM tbl_microgrids "
                     " WHERE id = %s ")
            cursor_system.execute(query, (microgrid_id,))
            row = cursor_system.fetchone()
        elif microgrid_uuid is not None:
            query = (" SELECT id, name, uuid, "
                     " address, postal_code, latitude, longitude, rated_capacity, rated_power, "
                     " contact_id, cost_center_id "
                     " FROM tbl_microgrids "
                     " WHERE uuid = %s ")
            cursor_system.execute(query, (microgrid_uuid,))
            row = cursor_system.fetchone()

        if row is None:
            cursor_system.close()
            cnx_system.close()
            raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND',
                                   description='API.MICROGRID_NOT_FOUND')
        else:
            microgrid_id = row[0]
            meta_result = {"id": row[0],
                           "name": row[1],
                           "uuid": row[2],
                           "address": row[3],
                           "postal_code": row[4],
                           "latitude": row[5],
                           "longitude": row[6],
                           "rated_capacity": row[7],
                           "rated_power": row[8],
                           "contact": contact_dict.get(row[9], None),
                           "cost_center": cost_center_dict.get(row[10], None),
                           "qrcode": 'microgrid:' + row[2]}

        point_list = list()
        meter_list = list()

        ################################################################################################
        # Step 3: query associated batteries
        ################################################################################################
        cursor_system.execute(" SELECT p.id, mb.name, p.units, p.object_type "
                              " FROM tbl_microgrids_batteries mb, tbl_points p "
                              " WHERE mb.microgrid_id = %s AND mb.soc_point_id = p.id ",
                              (microgrid_id,))
        row_point = cursor_system.fetchone()
        if row_point is not None:
            point_list.append({"id": row_point[0],
                               "name": row_point[1] + '.SOC',
                               "units": row_point[2],
                               "object_type": row_point[3]})

        cursor_system.execute(" SELECT p.id, mb.name, p.units, p.object_type "
                              " FROM tbl_microgrids_batteries mb, tbl_points p "
                              " WHERE mb.microgrid_id = %s AND mb.power_point_id = p.id ",
                              (microgrid_id,))
        row_point = cursor_system.fetchone()
        if row_point is not None:
            point_list.append({"id": row_point[0],
                               "name": row_point[1] + '.P',
                               "units": row_point[2],
                               "object_type": row_point[3]})

        cursor_system.execute(" SELECT m.id, mb.name, m.energy_category_id "
                              " FROM tbl_microgrids_batteries mb, tbl_meters m "
                              " WHERE mb.microgrid_id = %s AND mb.charge_meter_id = m.id ",
                              (microgrid_id,))
        row_meter = cursor_system.fetchone()
        if row_meter is not None:
            meter_list.append({"id": row_meter[0],
                               "name": row_meter[1] + '-Charge',
                               "energy_category_id": row_meter[2]})

        cursor_system.execute(" SELECT m.id, mb.name, m.energy_category_id "
                              " FROM tbl_microgrids_batteries mb, tbl_meters m "
                              " WHERE mb.microgrid_id = %s AND mb.discharge_meter_id = m.id ",
                              (microgrid_id,))
        row_meter = cursor_system.fetchone()
        if row_meter is not None:
            meter_list.append({"id": row_meter[0],
                               "name": row_meter[1] + '-Discharge',
                               "energy_category_id": row_meter[2]})

        ################################################################################################
        # Step 4: query associated ev chargers
        ################################################################################################

        ################################################################################################
        # Step 5: query associated generators
        ################################################################################################

        ################################################################################################
        # Step 6: query associated grids
        ################################################################################################
        cursor_system.execute(" SELECT p.id, mg.name, p.units, p.object_type "
                              " FROM tbl_microgrids_grids mg, tbl_points p "
                              " WHERE mg.microgrid_id = %s AND mg.power_point_id = p.id ",
                              (microgrid_id,))
        row_point = cursor_system.fetchone()
        if row_point is not None:
            point_list.append({"id": row_point[0],
                               "name": row_point[1] + '.P',
                               "units": row_point[2],
                               "object_type": row_point[3]})

        cursor_system.execute(" SELECT m.id, mg.name, m.energy_category_id "
                              " FROM tbl_microgrids_grids mg, tbl_meters m "
                              " WHERE mg.microgrid_id = %s AND mg.buy_meter_id = m.id ",
                              (microgrid_id,))
        row_meter = cursor_system.fetchone()
        if row_meter is not None:
            meter_list.append({"id": row_meter[0],
                               "name": row_meter[1] + '-Buy',
                               "energy_category_id": row_meter[2]})

        cursor_system.execute(" SELECT m.id, mg.name, m.energy_category_id "
                              " FROM tbl_microgrids_grids mg, tbl_meters m "
                              " WHERE mg.microgrid_id = %s AND mg.sell_meter_id = m.id ",
                              (microgrid_id,))
        row_meter = cursor_system.fetchone()
        if row_meter is not None:
            meter_list.append({"id": row_meter[0],
                               "name": row_meter[1] + '-Sell',
                               "energy_category_id": row_meter[2]})

        ################################################################################################
        # Step 7: query associated heat pumps
        ################################################################################################

        ################################################################################################
        # Step 8: query associated loads
        ################################################################################################
        cursor_system.execute(" SELECT p.id, ml.name, p.units, p.object_type "
                              " FROM tbl_microgrids_loads ml, tbl_points p "
                              " WHERE ml.microgrid_id = %s AND ml.power_point_id = p.id ",
                              (microgrid_id,))
        row_point = cursor_system.fetchone()
        if row_point is not None:
            point_list.append({"id": row_point[0],
                               "name": row_point[1] + '.P',
                               "units": row_point[2],
                               "object_type": row_point[3]})

        cursor_system.execute(" SELECT m.id, ml.name, m.energy_category_id "
                              " FROM tbl_microgrids_loads ml, tbl_meters m "
                              " WHERE ml.microgrid_id = %s AND ml.meter_id = m.id ",
                              (microgrid_id,))
        row_meter = cursor_system.fetchone()
        if row_meter is not None:
            meter_list.append({"id": row_meter[0],
                               "name": row_meter[1],
                               "energy_category_id": row_meter[2]})

        ################################################################################################
        # Step 9: query associated photovoltaics
        ################################################################################################
        # todo

        ################################################################################################
        # Step 10: query associated power conversion systems
        ################################################################################################
        # todo

        ################################################################################################
        # Step 11: query associated sensors
        ################################################################################################
        # todo

        ################################################################################################
        # Step 12: query associated meters data
        ################################################################################################
        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        meter_base_list = list()

        for meter in meter_list:
            cursor_carbon.execute(" SELECT start_datetime_utc, actual_value "
                                  " FROM tbl_meter_hourly "
                                  " WHERE meter_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s "
                                  " ORDER BY start_datetime_utc ",
                                  (meter['id'],
                                   base_start_datetime_utc,
                                   base_end_datetime_utc))
            rows_meter_hourly = cursor_carbon.fetchall()

            if rows_meter_hourly is not None and len(rows_meter_hourly) > 0:
                print('rows_meter_hourly:' + str(rows_meter_hourly))
                rows_meter_periodically = utilities.aggregate_hourly_data_by_period(rows_meter_hourly,
                                                                                    base_start_datetime_utc,
                                                                                    base_end_datetime_utc,
                                                                                    period_type)
                print('rows_meter_periodically:' + str(rows_meter_periodically))
                meter_report = dict()
                meter_report['timestamps'] = list()
                meter_report['values'] = list()
                meter_report['subtotal'] = Decimal(0.0)

                for row_meter_periodically in rows_meter_periodically:
                    current_datetime_local = row_meter_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.isoformat()[0:19]
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'weekly':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.isoformat()[0:7]
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.isoformat()[0:4]

                    actual_value = Decimal(0.0) if row_meter_periodically[1] is None else row_meter_periodically[1]

                    meter_report['timestamps'].append(current_datetime)
                    meter_report['values'].append(actual_value)
                    meter_report['subtotal'] += actual_value
                meter_report['name'] = meter['name']
                meter_report['unit_of_measure'] = \
                    energy_category_dict[meter['energy_category_id']]['unit_of_measure']

                meter_base_list.append(meter_report)

        meter_reporting_list = list()

        for meter in meter_list:
            cursor_carbon.execute(" SELECT start_datetime_utc, actual_value "
                                  " FROM tbl_meter_hourly "
                                  " WHERE meter_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s "
                                  " ORDER BY start_datetime_utc ",
                                  (meter['id'],
                                   reporting_start_datetime_utc,
                                   reporting_end_datetime_utc))
            rows_meter_hourly = cursor_carbon.fetchall()
            if rows_meter_hourly is not None and len(rows_meter_hourly) > 0:
                rows_meter_periodically = utilities.aggregate_hourly_data_by_period(rows_meter_hourly,
                                                                                    reporting_start_datetime_utc,
                                                                                    reporting_end_datetime_utc,
                                                                                    period_type)
                meter_report = dict()
                meter_report['timestamps'] = list()
                meter_report['values'] = list()
                meter_report['subtotal'] = Decimal(0.0)

                for row_meter_periodically in rows_meter_periodically:
                    current_datetime_local = row_meter_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.isoformat()[0:19]
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'weekly':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.isoformat()[0:7]
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.isoformat()[0:4]

                    actual_value = Decimal(0.0) if row_meter_periodically[1] is None else row_meter_periodically[1]

                    meter_report['timestamps'].append(current_datetime)
                    meter_report['values'].append(actual_value)
                    meter_report['subtotal'] += actual_value
                meter_report['name'] = meter['name']
                meter_report['unit_of_measure'] = \
                    energy_category_dict[meter['energy_category_id']]['unit_of_measure']

                meter_reporting_list.append(meter_report)

        ################################################################################################
        # Step 13: query associated points data
        ################################################################################################

        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()

        for point in point_list:
            point_values = []
            point_timestamps = []
            if point['object_type'] == 'ENERGY_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_energy_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    reporting_start_datetime_local = reporting_start_datetime_utc.replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime_local = reporting_start_datetime_local

                    while current_datetime_local < rows[0][0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset):
                        point_timestamps.append(current_datetime_local.isoformat()[5:16])
                        point_values.append(rows[0][1])
                        current_datetime_local += timedelta(minutes=1)

                    for index in range(len(rows) - 1):
                        while current_datetime_local < rows[index + 1][0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset):
                            point_timestamps.append(current_datetime_local.isoformat()[5:16])
                            point_values.append(rows[index][1])
                            current_datetime_local += timedelta(minutes=1)
            elif point['object_type'] == 'ANALOG_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_analog_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    reporting_start_datetime_local = reporting_start_datetime_utc.replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime_local = reporting_start_datetime_local

                    while current_datetime_local < rows[0][0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset):
                        point_timestamps.append(current_datetime_local.isoformat()[5:16])
                        point_values.append(rows[0][1])
                        current_datetime_local += timedelta(minutes=1)

                    for index in range(len(rows) - 1):
                        while current_datetime_local < rows[index + 1][0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset):
                            point_timestamps.append(current_datetime_local.isoformat()[5:16])
                            point_values.append(rows[index][1])
                            current_datetime_local += timedelta(minutes=1)
            elif point['object_type'] == 'DIGITAL_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_digital_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    reporting_start_datetime_local = reporting_start_datetime_utc.replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime_local = reporting_start_datetime_local

                    while current_datetime_local < rows[0][0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset):
                        point_timestamps.append(current_datetime_local.isoformat()[5:16])
                        point_values.append(rows[0][1])
                        current_datetime_local += timedelta(minutes=1)

                    for index in range(len(rows) - 1):
                        while current_datetime_local < rows[index + 1][0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset):
                            point_timestamps.append(current_datetime_local.isoformat()[5:16])
                            point_values.append(rows[index][1])
                            current_datetime_local += timedelta(minutes=1)

            parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
            parameters_data['timestamps'].append(point_timestamps)
            parameters_data['values'].append(point_values)

        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()

        if cursor_carbon:
            cursor_carbon.close()
        if cnx_carbon:
            cnx_carbon.close()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()
        ################################################################################################
        # Step 14: construct the report
        ################################################################################################
        result = dict()
        result['microgrid'] = meta_result
        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }
        result['base_period'] = dict()
        result['base_period']['names'] = list()
        result['base_period']['units'] = list()
        result['base_period']['timestamps'] = list()
        result['base_period']['values'] = list()
        result['base_period']['subtotals'] = list()

        if meter_base_list is not None and len(meter_base_list) > 0:
            for meter_report in meter_base_list:
                result['base_period']['names'].append(meter_report['name'])
                result['base_period']['units'].append(meter_report['unit_of_measure'])
                result['base_period']['timestamps'].append(meter_report['timestamps'])
                result['base_period']['values'].append(meter_report['values'])
                result['base_period']['subtotals'].append(meter_report['subtotal'])

        result['reporting_period'] = dict()
        result['reporting_period']['names'] = list()
        result['reporting_period']['units'] = list()
        result['reporting_period']['subtotals'] = list()
        result['reporting_period']['increment_rates'] = list()
        result['reporting_period']['timestamps'] = list()
        result['reporting_period']['values'] = list()

        if meter_reporting_list is not None and len(meter_reporting_list) > 0:
            for meter_report in meter_reporting_list:
                result['reporting_period']['names'].append(meter_report['name'])
                result['reporting_period']['units'].append(meter_report['unit_of_measure'])
                result['reporting_period']['timestamps'].append(meter_report['timestamps'])
                result['reporting_period']['values'].append(meter_report['values'])
                result['reporting_period']['subtotals'].append(meter_report['subtotal'])

        # export the result to an Excel file and then encode the file as a base64 string
        if not is_quick_mode:
            result['excel_bytes_base64'] = \
                excelexporters.microgridreportingcarbon.\
                export(result,
                       result['microgrid']['name'],
                       reporting_period_start_datetime_local,
                       reporting_period_end_datetime_local,
                       base_period_start_datetime_local,
                       base_period_end_datetime_local,
                       period_type,
                       language)
        resp.text = json.dumps(result)
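The 98.08 % duplication reported above comes largely from the two near-identical meter aggregation loops in Step 12 (one for the base period, one for the reporting period). Below is a minimal sketch of how both loops could be collapsed into one shared helper; the function name aggregate_meter_values and its parameter list are hypothetical and not part of the module above, and it assumes the same cursor, utilities.aggregate_hourly_data_by_period and energy_category_dict behaviour shown in the report code.

from datetime import timedelta, timezone
from decimal import Decimal


def aggregate_meter_values(cursor, meter, start_datetime_utc, end_datetime_utc,
                           period_type, timezone_offset, energy_category_dict, utilities):
    """Query tbl_meter_hourly for one meter and aggregate it over one period (hypothetical helper)."""
    cursor.execute(" SELECT start_datetime_utc, actual_value "
                   " FROM tbl_meter_hourly "
                   " WHERE meter_id = %s "
                   " AND start_datetime_utc >= %s "
                   " AND start_datetime_utc < %s "
                   " ORDER BY start_datetime_utc ",
                   (meter['id'], start_datetime_utc, end_datetime_utc))
    rows_meter_hourly = cursor.fetchall()
    if not rows_meter_hourly:
        return None
    rows_meter_periodically = utilities.aggregate_hourly_data_by_period(
        rows_meter_hourly, start_datetime_utc, end_datetime_utc, period_type)
    # how many characters of the ISO timestamp to keep for each period type
    slice_width = {'hourly': 19, 'daily': 10, 'weekly': 10, 'monthly': 7, 'yearly': 4}[period_type]
    meter_report = {'timestamps': [], 'values': [], 'subtotal': Decimal(0.0),
                    'name': meter['name'],
                    'unit_of_measure': energy_category_dict[meter['energy_category_id']]['unit_of_measure']}
    for row in rows_meter_periodically:
        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + timedelta(minutes=timezone_offset)
        actual_value = Decimal(0.0) if row[1] is None else row[1]
        meter_report['timestamps'].append(current_datetime_local.isoformat()[0:slice_width])
        meter_report['values'].append(actual_value)
        meter_report['subtotal'] += actual_value
    return meter_report

With such a helper, the two Step 12 loops reduce to one call per meter and per period, and the three nearly identical branches of Step 13 could be treated the same way by parameterizing the historical table name.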