@@ 13-627 (lines=615) @@ | ||
10 | from core.useractivity import access_control, api_key_control |
|
11 | ||
12 | ||
13 | class Reporting: |
|
14 | def __init__(self): |
|
15 | """"Initializes Reporting""" |
|
16 | pass |
|
17 | ||
18 | @staticmethod |
|
19 | def on_options(req, resp): |
|
20 | _ = req |
|
21 | resp.status = falcon.HTTP_200 |
|
22 | ||
23 | #################################################################################################################### |
|
24 | # PROCEDURES |
|
25 | # Step 1: validate parameters |
|
26 | # Step 2: query the shopfloor |
|
27 | # Step 3: query energy categories |
|
28 | # Step 4: query associated sensors |
|
29 | # Step 5: query associated points |
|
30 | # Step 6: query base period energy cost |
|
31 | # Step 7: query reporting period energy cost |
|
32 | # Step 8: query tariff data |
|
33 | # Step 9: query associated sensors and points data |
|
34 | # Step 10: construct the report |
|
35 | #################################################################################################################### |
|
36 | @staticmethod |
|
37 | def on_get(req, resp): |
|
38 | if 'API-KEY' not in req.headers or \ |
|
39 | not isinstance(req.headers['API-KEY'], str) or \ |
|
40 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
41 | access_control(req) |
|
42 | else: |
|
43 | api_key_control(req) |
|
44 | print(req.params) |
|
45 | shopfloor_id = req.params.get('shopfloorid') |
|
46 | shopfloor_uuid = req.params.get('shopflooruuid') |
|
47 | period_type = req.params.get('periodtype') |
|
48 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
49 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
50 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
51 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
52 | language = req.params.get('language') |
|
53 | quick_mode = req.params.get('quickmode') |
|
54 | ||
55 | ################################################################################################################ |
|
56 | # Step 1: validate parameters |
|
57 | ################################################################################################################ |
|
58 | if shopfloor_id is None and shopfloor_uuid is None: |
|
59 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
60 | title='API.BAD_REQUEST', |
|
61 | description='API.INVALID_SHOPFLOOR_ID') |
|
62 | ||
63 | if shopfloor_id is not None: |
|
64 | shopfloor_id = str.strip(shopfloor_id) |
|
65 | if not shopfloor_id.isdigit() or int(shopfloor_id) <= 0: |
|
66 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
67 | title='API.BAD_REQUEST', |
|
68 | description='API.INVALID_SHOPFLOOR_ID') |
|
69 | ||
70 | if shopfloor_uuid is not None: |
|
71 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
72 | match = regex.match(str.strip(shopfloor_uuid)) |
|
73 | if not bool(match): |
|
74 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
75 | title='API.BAD_REQUEST', |
|
76 | description='API.INVALID_SHOPFLOOR_UUID') |
|
77 | ||
78 | if period_type is None: |
|
79 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
80 | description='API.INVALID_PERIOD_TYPE') |
|
81 | else: |
|
82 | period_type = str.strip(period_type) |
|
83 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
84 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
85 | description='API.INVALID_PERIOD_TYPE') |
|
86 | ||
87 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
88 | if config.utc_offset[0] == '-': |
|
89 | timezone_offset = -timezone_offset |
|
90 | ||
91 | base_start_datetime_utc = None |
|
92 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
93 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
94 | try: |
|
95 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
96 | except ValueError: |
|
97 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
98 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
99 | base_start_datetime_utc = \ |
|
100 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
101 | # normalize the start datetime |
|
102 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
103 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
104 | else: |
|
105 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
106 | ||
107 | base_end_datetime_utc = None |
|
108 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
109 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
110 | try: |
|
111 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
112 | except ValueError: |
|
113 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
114 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
115 | base_end_datetime_utc = \ |
|
116 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
117 | ||
118 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
119 | base_start_datetime_utc >= base_end_datetime_utc: |
|
120 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
121 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
122 | ||
123 | if reporting_period_start_datetime_local is None: |
|
124 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
125 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
126 | else: |
|
127 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
128 | try: |
|
129 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
130 | '%Y-%m-%dT%H:%M:%S') |
|
131 | except ValueError: |
|
132 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
133 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
134 | reporting_start_datetime_utc = \ |
|
135 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
136 | # nomalize the start datetime |
|
137 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
138 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
139 | else: |
|
140 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
141 | ||
142 | if reporting_period_end_datetime_local is None: |
|
143 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
144 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
145 | else: |
|
146 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
147 | try: |
|
148 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
149 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
150 | timedelta(minutes=timezone_offset) |
|
151 | except ValueError: |
|
152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
153 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
154 | ||
155 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
157 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
158 | ||
159 | # if quick mode is turned on, do not return parameters data or the Excel file |
|
160 | is_quick_mode = False |
|
161 | if quick_mode is not None and \ |
|
162 | len(str.strip(quick_mode)) > 0 and \ |
|
163 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
164 | is_quick_mode = True |
|
165 | ||
166 | trans = utilities.get_translation(language) |
|
167 | trans.install() |
|
168 | _ = trans.gettext |
|
169 | ||
170 | ################################################################################################################ |
|
171 | # Step 2: query the shopfloor |
|
172 | ################################################################################################################ |
|
173 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
174 | cursor_system = cnx_system.cursor() |
|
175 | ||
176 | cnx_billing = mysql.connector.connect(**config.myems_billing_db) |
|
177 | cursor_billing = cnx_billing.cursor() |
|
178 | ||
179 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
180 | cursor_historical = cnx_historical.cursor() |
|
181 | ||
182 | if shopfloor_id is not None: |
|
183 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
184 | " FROM tbl_shopfloors " |
|
185 | " WHERE id = %s ", (shopfloor_id,)) |
|
186 | row_shopfloor = cursor_system.fetchone() |
|
187 | elif shopfloor_uuid is not None: |
|
188 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
189 | " FROM tbl_shopfloors " |
|
190 | " WHERE uuid = %s ", (shopfloor_uuid,)) |
|
191 | row_shopfloor = cursor_system.fetchone() |
|
192 | ||
193 | if row_shopfloor is None: |
|
194 | if cursor_system: |
|
195 | cursor_system.close() |
|
196 | if cnx_system: |
|
197 | cnx_system.close() |
|
198 | ||
199 | if cursor_billing: |
|
200 | cursor_billing.close() |
|
201 | if cnx_billing: |
|
202 | cnx_billing.close() |
|
203 | ||
204 | if cursor_historical: |
|
205 | cursor_historical.close() |
|
206 | if cnx_historical: |
|
207 | cnx_historical.close() |
|
208 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.SHOPFLOOR_NOT_FOUND') |
|
209 | ||
210 | shopfloor = dict() |
|
211 | shopfloor['id'] = row_shopfloor[0] |
|
212 | shopfloor['name'] = row_shopfloor[1] |
|
213 | shopfloor['area'] = row_shopfloor[2] |
|
214 | shopfloor['cost_center_id'] = row_shopfloor[3] |
|
215 | ||
216 | ################################################################################################################ |
|
217 | # Step 3: query energy categories |
|
218 | ################################################################################################################ |
|
219 | energy_category_set = set() |
|
220 | # query energy categories in base period |
|
221 | cursor_billing.execute(" SELECT DISTINCT(energy_category_id) " |
|
222 | " FROM tbl_shopfloor_input_category_hourly " |
|
223 | " WHERE shopfloor_id = %s " |
|
224 | " AND start_datetime_utc >= %s " |
|
225 | " AND start_datetime_utc < %s ", |
|
226 | (shopfloor['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
227 | rows_energy_categories = cursor_billing.fetchall() |
|
228 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
229 | for row_energy_category in rows_energy_categories: |
|
230 | energy_category_set.add(row_energy_category[0]) |
|
231 | ||
232 | # query energy categories in reporting period |
|
233 | cursor_billing.execute(" SELECT DISTINCT(energy_category_id) " |
|
234 | " FROM tbl_shopfloor_input_category_hourly " |
|
235 | " WHERE shopfloor_id = %s " |
|
236 | " AND start_datetime_utc >= %s " |
|
237 | " AND start_datetime_utc < %s ", |
|
238 | (shopfloor['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
239 | rows_energy_categories = cursor_billing.fetchall() |
|
240 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
241 | for row_energy_category in rows_energy_categories: |
|
242 | energy_category_set.add(row_energy_category[0]) |
|
243 | ||
244 | # query all energy categories in base period and reporting period |
|
245 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
246 | " FROM tbl_energy_categories " |
|
247 | " ORDER BY id ", ) |
|
248 | rows_energy_categories = cursor_system.fetchall() |
|
249 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
250 | if cursor_system: |
|
251 | cursor_system.close() |
|
252 | if cnx_system: |
|
253 | cnx_system.close() |
|
254 | ||
255 | if cursor_billing: |
|
256 | cursor_billing.close() |
|
257 | if cnx_billing: |
|
258 | cnx_billing.close() |
|
259 | ||
260 | if cursor_historical: |
|
261 | cursor_historical.close() |
|
262 | if cnx_historical: |
|
263 | cnx_historical.close() |
|
264 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
265 | title='API.NOT_FOUND', |
|
266 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
267 | energy_category_dict = dict() |
|
268 | for row_energy_category in rows_energy_categories: |
|
269 | if row_energy_category[0] in energy_category_set: |
|
270 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
271 | "unit_of_measure": row_energy_category[2], |
|
272 | "kgce": row_energy_category[3], |
|
273 | "kgco2e": row_energy_category[4]} |
|
274 | ||
275 | ################################################################################################################ |
|
276 | # Step 4: query associated sensors |
|
277 | ################################################################################################################ |
|
278 | point_list = list() |
|
279 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
280 | " FROM tbl_shopfloors st, tbl_sensors se, tbl_shopfloors_sensors ss, " |
|
281 | " tbl_points p, tbl_sensors_points sp " |
|
282 | " WHERE st.id = %s AND st.id = ss.shopfloor_id AND ss.sensor_id = se.id " |
|
283 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
|
284 | " ORDER BY p.id ", (shopfloor['id'],)) |
|
285 | rows_points = cursor_system.fetchall() |
|
286 | if rows_points is not None and len(rows_points) > 0: |
|
287 | for row in rows_points: |
|
288 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
289 | ||
290 | ################################################################################################################ |
|
291 | # Step 5: query associated points |
|
292 | ################################################################################################################ |
|
293 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
294 | " FROM tbl_shopfloors s, tbl_shopfloors_points sp, tbl_points p " |
|
295 | " WHERE s.id = %s AND s.id = sp.shopfloor_id AND sp.point_id = p.id " |
|
296 | " ORDER BY p.id ", (shopfloor['id'],)) |
|
297 | rows_points = cursor_system.fetchall() |
|
298 | if rows_points is not None and len(rows_points) > 0: |
|
299 | for row in rows_points: |
|
300 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
301 | ||
302 | ################################################################################################################ |
|
303 | # Step 6: query base period energy cost |
|
304 | ################################################################################################################ |
|
305 | base = dict() |
|
306 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
307 | for energy_category_id in energy_category_set: |
|
308 | base[energy_category_id] = dict() |
|
309 | base[energy_category_id]['timestamps'] = list() |
|
310 | base[energy_category_id]['values'] = list() |
|
311 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
312 | ||
313 | cursor_billing.execute(" SELECT start_datetime_utc, actual_value " |
|
314 | " FROM tbl_shopfloor_input_category_hourly " |
|
315 | " WHERE shopfloor_id = %s " |
|
316 | " AND energy_category_id = %s " |
|
317 | " AND start_datetime_utc >= %s " |
|
318 | " AND start_datetime_utc < %s " |
|
319 | " ORDER BY start_datetime_utc ", |
|
320 | (shopfloor['id'], |
|
321 | energy_category_id, |
|
322 | base_start_datetime_utc, |
|
323 | base_end_datetime_utc)) |
|
324 | rows_shopfloor_hourly = cursor_billing.fetchall() |
|
325 | ||
326 | rows_shopfloor_periodically = utilities.aggregate_hourly_data_by_period(rows_shopfloor_hourly, |
|
327 | base_start_datetime_utc, |
|
328 | base_end_datetime_utc, |
|
329 | period_type) |
|
330 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
|
331 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
332 | timedelta(minutes=timezone_offset) |
|
333 | if period_type == 'hourly': |
|
334 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
335 | elif period_type == 'daily': |
|
336 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
337 | elif period_type == 'weekly': |
|
338 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
339 | elif period_type == 'monthly': |
|
340 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
341 | elif period_type == 'yearly': |
|
342 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
343 | ||
344 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
|
345 | else row_shopfloor_periodically[1] |
|
346 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
347 | base[energy_category_id]['values'].append(actual_value) |
|
348 | base[energy_category_id]['subtotal'] += actual_value |
|
349 | ||
350 | ################################################################################################################ |
|
351 | # Step 7: query reporting period energy cost |
|
352 | ################################################################################################################ |
|
353 | reporting = dict() |
|
354 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
355 | for energy_category_id in energy_category_set: |
|
356 | reporting[energy_category_id] = dict() |
|
357 | reporting[energy_category_id]['timestamps'] = list() |
|
358 | reporting[energy_category_id]['values'] = list() |
|
359 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
360 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
|
361 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
|
362 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
|
363 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
|
364 | reporting[energy_category_id]['deep'] = Decimal(0.0) |
|
365 | ||
366 | cursor_billing.execute(" SELECT start_datetime_utc, actual_value " |
|
367 | " FROM tbl_shopfloor_input_category_hourly " |
|
368 | " WHERE shopfloor_id = %s " |
|
369 | " AND energy_category_id = %s " |
|
370 | " AND start_datetime_utc >= %s " |
|
371 | " AND start_datetime_utc < %s " |
|
372 | " ORDER BY start_datetime_utc ", |
|
373 | (shopfloor['id'], |
|
374 | energy_category_id, |
|
375 | reporting_start_datetime_utc, |
|
376 | reporting_end_datetime_utc)) |
|
377 | rows_shopfloor_hourly = cursor_billing.fetchall() |
|
378 | ||
379 | rows_shopfloor_periodically = utilities.aggregate_hourly_data_by_period(rows_shopfloor_hourly, |
|
380 | reporting_start_datetime_utc, |
|
381 | reporting_end_datetime_utc, |
|
382 | period_type) |
|
383 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
|
384 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
385 | timedelta(minutes=timezone_offset) |
|
386 | if period_type == 'hourly': |
|
387 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
388 | elif period_type == 'daily': |
|
389 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
390 | elif period_type == 'weekly': |
|
391 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
392 | elif period_type == 'monthly': |
|
393 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
394 | elif period_type == 'yearly': |
|
395 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
396 | ||
397 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
|
398 | else row_shopfloor_periodically[1] |
|
399 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
400 | reporting[energy_category_id]['values'].append(actual_value) |
|
401 | reporting[energy_category_id]['subtotal'] += actual_value |
|
402 | ||
403 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(shopfloor['cost_center_id'], |
|
404 | energy_category_id, |
|
405 | reporting_start_datetime_utc, |
|
406 | reporting_end_datetime_utc) |
|
407 | for row in rows_shopfloor_hourly: |
|
408 | peak_type = energy_category_tariff_dict.get(row[0], None) |
|
409 | if peak_type == 'toppeak': |
|
410 | reporting[energy_category_id]['toppeak'] += row[1] |
|
411 | elif peak_type == 'onpeak': |
|
412 | reporting[energy_category_id]['onpeak'] += row[1] |
|
413 | elif peak_type == 'midpeak': |
|
414 | reporting[energy_category_id]['midpeak'] += row[1] |
|
415 | elif peak_type == 'offpeak': |
|
416 | reporting[energy_category_id]['offpeak'] += row[1] |
|
417 | elif peak_type == 'deep': |
|
418 | reporting[energy_category_id]['deep'] += row[1] |
|
419 | ||
420 | ################################################################################################################ |
|
421 | # Step 8: query tariff data |
|
422 | ################################################################################################################ |
|
423 | parameters_data = dict() |
|
424 | parameters_data['names'] = list() |
|
425 | parameters_data['timestamps'] = list() |
|
426 | parameters_data['values'] = list() |
|
427 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
428 | and not is_quick_mode: |
|
429 | for energy_category_id in energy_category_set: |
|
430 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(shopfloor['cost_center_id'], |
|
431 | energy_category_id, |
|
432 | reporting_start_datetime_utc, |
|
433 | reporting_end_datetime_utc) |
|
434 | tariff_timestamp_list = list() |
|
435 | tariff_value_list = list() |
|
436 | for k, v in energy_category_tariff_dict.items(): |
|
437 | # convert k from utc to local |
|
438 | k = k + timedelta(minutes=timezone_offset) |
|
439 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
440 | tariff_value_list.append(v) |
|
441 | ||
442 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
443 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
444 | parameters_data['values'].append(tariff_value_list) |
|
445 | ||
446 | ################################################################################################################ |
|
447 | # Step 9: query associated sensors and points data |
|
448 | ################################################################################################################ |
|
449 | if not is_quick_mode: |
|
450 | for point in point_list: |
|
451 | point_values = [] |
|
452 | point_timestamps = [] |
|
453 | if point['object_type'] == 'ENERGY_VALUE': |
|
454 | query = (" SELECT utc_date_time, actual_value " |
|
455 | " FROM tbl_energy_value " |
|
456 | " WHERE point_id = %s " |
|
457 | " AND utc_date_time BETWEEN %s AND %s " |
|
458 | " ORDER BY utc_date_time ") |
|
459 | cursor_historical.execute(query, (point['id'], |
|
460 | reporting_start_datetime_utc, |
|
461 | reporting_end_datetime_utc)) |
|
462 | rows = cursor_historical.fetchall() |
|
463 | ||
464 | if rows is not None and len(rows) > 0: |
|
465 | for row in rows: |
|
466 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
467 | timedelta(minutes=timezone_offset) |
|
468 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
469 | point_timestamps.append(current_datetime) |
|
470 | point_values.append(row[1]) |
|
471 | elif point['object_type'] == 'ANALOG_VALUE': |
|
472 | query = (" SELECT utc_date_time, actual_value " |
|
473 | " FROM tbl_analog_value " |
|
474 | " WHERE point_id = %s " |
|
475 | " AND utc_date_time BETWEEN %s AND %s " |
|
476 | " ORDER BY utc_date_time ") |
|
477 | cursor_historical.execute(query, (point['id'], |
|
478 | reporting_start_datetime_utc, |
|
479 | reporting_end_datetime_utc)) |
|
480 | rows = cursor_historical.fetchall() |
|
481 | ||
482 | if rows is not None and len(rows) > 0: |
|
483 | for row in rows: |
|
484 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
485 | timedelta(minutes=timezone_offset) |
|
486 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
487 | point_timestamps.append(current_datetime) |
|
488 | point_values.append(row[1]) |
|
489 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
490 | query = (" SELECT utc_date_time, actual_value " |
|
491 | " FROM tbl_digital_value " |
|
492 | " WHERE point_id = %s " |
|
493 | " AND utc_date_time BETWEEN %s AND %s " |
|
494 | " ORDER BY utc_date_time ") |
|
495 | cursor_historical.execute(query, (point['id'], |
|
496 | reporting_start_datetime_utc, |
|
497 | reporting_end_datetime_utc)) |
|
498 | rows = cursor_historical.fetchall() |
|
499 | ||
500 | if rows is not None and len(rows) > 0: |
|
501 | for row in rows: |
|
502 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
503 | timedelta(minutes=timezone_offset) |
|
504 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
505 | point_timestamps.append(current_datetime) |
|
506 | point_values.append(row[1]) |
|
507 | ||
508 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
509 | parameters_data['timestamps'].append(point_timestamps) |
|
510 | parameters_data['values'].append(point_values) |
|
511 | ||
512 | ################################################################################################################ |
|
513 | # Step 10: construct the report |
|
514 | ################################################################################################################ |
|
515 | if cursor_system: |
|
516 | cursor_system.close() |
|
517 | if cnx_system: |
|
518 | cnx_system.close() |
|
519 | ||
520 | if cursor_billing: |
|
521 | cursor_billing.close() |
|
522 | if cnx_billing: |
|
523 | cnx_billing.close() |
|
524 | ||
525 | if cursor_historical: |
|
526 | cursor_historical.close() |
|
527 | if cnx_historical: |
|
528 | cnx_historical.close() |
|
529 | ||
530 | result = dict() |
|
531 | ||
532 | result['shopfloor'] = dict() |
|
533 | result['shopfloor']['name'] = shopfloor['name'] |
|
534 | result['shopfloor']['area'] = shopfloor['area'] |
|
535 | ||
536 | result['base_period'] = dict() |
|
537 | result['base_period']['names'] = list() |
|
538 | result['base_period']['units'] = list() |
|
539 | result['base_period']['timestamps'] = list() |
|
540 | result['base_period']['values'] = list() |
|
541 | result['base_period']['subtotals'] = list() |
|
542 | result['base_period']['total'] = Decimal(0.0) |
|
543 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
544 | for energy_category_id in energy_category_set: |
|
545 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
546 | result['base_period']['units'].append(config.currency_unit) |
|
547 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
548 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
549 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
550 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
|
551 | ||
552 | result['reporting_period'] = dict() |
|
553 | result['reporting_period']['names'] = list() |
|
554 | result['reporting_period']['energy_category_ids'] = list() |
|
555 | result['reporting_period']['units'] = list() |
|
556 | result['reporting_period']['timestamps'] = list() |
|
557 | result['reporting_period']['values'] = list() |
|
558 | result['reporting_period']['rates'] = list() |
|
559 | result['reporting_period']['subtotals'] = list() |
|
560 | result['reporting_period']['subtotals_per_unit_area'] = list() |
|
561 | result['reporting_period']['toppeaks'] = list() |
|
562 | result['reporting_period']['onpeaks'] = list() |
|
563 | result['reporting_period']['midpeaks'] = list() |
|
564 | result['reporting_period']['offpeaks'] = list() |
|
565 | result['reporting_period']['deeps'] = list() |
|
566 | result['reporting_period']['increment_rates'] = list() |
|
567 | result['reporting_period']['total'] = Decimal(0.0) |
|
568 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
|
569 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
|
570 | result['reporting_period']['total_unit'] = config.currency_unit |
|
571 | ||
572 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
573 | for energy_category_id in energy_category_set: |
|
574 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
575 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
576 | result['reporting_period']['units'].append(config.currency_unit) |
|
577 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
578 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
579 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
580 | result['reporting_period']['subtotals_per_unit_area'].append( |
|
581 | reporting[energy_category_id]['subtotal'] / shopfloor['area'] if shopfloor['area'] > 0.0 else None) |
|
582 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
|
583 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
|
584 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
|
585 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
|
586 | result['reporting_period']['deeps'].append(reporting[energy_category_id]['deep']) |
|
587 | result['reporting_period']['increment_rates'].append( |
|
588 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
|
589 | base[energy_category_id]['subtotal'] |
|
590 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
|
591 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
|
592 | ||
593 | rate = list() |
|
594 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
595 | if index < len(base[energy_category_id]['values']) \ |
|
596 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
597 | rate.append((value - base[energy_category_id]['values'][index]) |
|
598 | / base[energy_category_id]['values'][index]) |
|
599 | else: |
|
600 | rate.append(None) |
|
601 | result['reporting_period']['rates'].append(rate) |
|
602 | ||
603 | result['reporting_period']['total_per_unit_area'] = \ |
|
604 | result['reporting_period']['total'] / shopfloor['area'] if shopfloor['area'] > 0.0 else None |
|
605 | ||
606 | result['reporting_period']['total_increment_rate'] = \ |
|
607 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
|
608 | result['base_period']['total'] \ |
|
609 | if result['base_period']['total'] > Decimal(0.0) else None |
|
610 | ||
611 | result['parameters'] = { |
|
612 | "names": parameters_data['names'], |
|
613 | "timestamps": parameters_data['timestamps'], |
|
614 | "values": parameters_data['values'] |
|
615 | } |
|
616 | # export the result to an Excel file and then encode the file as a base64 string |
|
617 | result['excel_bytes_base64'] = None |
|
618 | if not is_quick_mode: |
|
619 | result['excel_bytes_base64'] = excelexporters.shopfloorcost.export(result, |
|
620 | shopfloor['name'], |
|
621 | base_period_start_datetime_local, |
|
622 | base_period_end_datetime_local, |
|
623 | reporting_period_start_datetime_local, |
|
624 | reporting_period_end_datetime_local, |
|
625 | period_type, |
|
626 | language) |
|
627 | resp.text = json.dumps(result) |
|
628 |
@@ 13-627 (lines=615) @@ | ||
10 | from core.useractivity import access_control, api_key_control |
|
11 | ||
12 | ||
13 | class Reporting: |
|
14 | def __init__(self): |
|
15 | """"Initializes Reporting""" |
|
16 | pass |
|
17 | ||
18 | @staticmethod |
|
19 | def on_options(req, resp): |
|
20 | _ = req |
|
21 | resp.status = falcon.HTTP_200 |
|
22 | ||
23 | #################################################################################################################### |
|
24 | # PROCEDURES |
|
25 | # Step 1: validate parameters |
|
26 | # Step 2: query the shopfloor |
|
27 | # Step 3: query energy categories |
|
28 | # Step 4: query associated sensors |
|
29 | # Step 5: query associated points |
|
30 | # Step 6: query base period energy carbon dioxide emissions |
|
31 | # Step 7: query reporting period energy carbon dioxide emissions |
|
32 | # Step 8: query tariff data |
|
33 | # Step 9: query associated sensors and points data |
|
34 | # Step 10: construct the report |
|
35 | #################################################################################################################### |
|
36 | @staticmethod |
|
37 | def on_get(req, resp): |
|
38 | if 'API-KEY' not in req.headers or \ |
|
39 | not isinstance(req.headers['API-KEY'], str) or \ |
|
40 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
41 | access_control(req) |
|
42 | else: |
|
43 | api_key_control(req) |
|
44 | print(req.params) |
|
45 | shopfloor_id = req.params.get('shopfloorid') |
|
46 | shopfloor_uuid = req.params.get('shopflooruuid') |
|
47 | period_type = req.params.get('periodtype') |
|
48 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
49 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
50 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
51 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
52 | language = req.params.get('language') |
|
53 | quick_mode = req.params.get('quickmode') |
|
54 | ||
55 | ################################################################################################################ |
|
56 | # Step 1: validate parameters |
|
57 | ################################################################################################################ |
|
58 | if shopfloor_id is None and shopfloor_uuid is None: |
|
59 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
60 | title='API.BAD_REQUEST', |
|
61 | description='API.INVALID_SHOPFLOOR_ID') |
|
62 | ||
63 | if shopfloor_id is not None: |
|
64 | shopfloor_id = str.strip(shopfloor_id) |
|
65 | if not shopfloor_id.isdigit() or int(shopfloor_id) <= 0: |
|
66 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
67 | title='API.BAD_REQUEST', |
|
68 | description='API.INVALID_SHOPFLOOR_ID') |
|
69 | ||
70 | if shopfloor_uuid is not None: |
|
71 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
72 | match = regex.match(str.strip(shopfloor_uuid)) |
|
73 | if not bool(match): |
|
74 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
75 | title='API.BAD_REQUEST', |
|
76 | description='API.INVALID_SHOPFLOOR_UUID') |
|
77 | ||
78 | if period_type is None: |
|
79 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
80 | description='API.INVALID_PERIOD_TYPE') |
|
81 | else: |
|
82 | period_type = str.strip(period_type) |
|
83 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
84 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
85 | description='API.INVALID_PERIOD_TYPE') |
|
86 | ||
87 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
88 | if config.utc_offset[0] == '-': |
|
89 | timezone_offset = -timezone_offset |
|
90 | ||
91 | base_start_datetime_utc = None |
|
92 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
93 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
94 | try: |
|
95 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
96 | except ValueError: |
|
97 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
98 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
99 | base_start_datetime_utc = \ |
|
100 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
101 | # normalize the start datetime |
|
102 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
103 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
104 | else: |
|
105 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
106 | ||
107 | base_end_datetime_utc = None |
|
108 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
109 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
110 | try: |
|
111 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
112 | except ValueError: |
|
113 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
114 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
115 | base_end_datetime_utc = \ |
|
116 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
117 | ||
118 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
119 | base_start_datetime_utc >= base_end_datetime_utc: |
|
120 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
121 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
122 | ||
123 | if reporting_period_start_datetime_local is None: |
|
124 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
125 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
126 | else: |
|
127 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
128 | try: |
|
129 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
130 | '%Y-%m-%dT%H:%M:%S') |
|
131 | except ValueError: |
|
132 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
133 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
134 | reporting_start_datetime_utc = \ |
|
135 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
136 | # nomalize the start datetime |
|
137 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
138 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
139 | else: |
|
140 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
141 | ||
142 | if reporting_period_end_datetime_local is None: |
|
143 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
144 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
145 | else: |
|
146 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
147 | try: |
|
148 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
149 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
150 | timedelta(minutes=timezone_offset) |
|
151 | except ValueError: |
|
152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
153 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
154 | ||
155 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
157 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
158 | ||
159 | # if quick mode is turned on, do not return parameters data or the Excel file |
|
160 | is_quick_mode = False |
|
161 | if quick_mode is not None and \ |
|
162 | len(str.strip(quick_mode)) > 0 and \ |
|
163 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
164 | is_quick_mode = True |
|
165 | ||
166 | trans = utilities.get_translation(language) |
|
167 | trans.install() |
|
168 | _ = trans.gettext |
|
169 | ||
170 | ################################################################################################################ |
|
171 | # Step 2: query the shopfloor |
|
172 | ################################################################################################################ |
|
173 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
174 | cursor_system = cnx_system.cursor() |
|
175 | ||
176 | cnx_carbon = mysql.connector.connect(**config.myems_carbon_db) |
|
177 | cursor_carbon = cnx_carbon.cursor() |
|
178 | ||
179 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
180 | cursor_historical = cnx_historical.cursor() |
|
181 | ||
182 | if shopfloor_id is not None: |
|
183 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
184 | " FROM tbl_shopfloors " |
|
185 | " WHERE id = %s ", (shopfloor_id,)) |
|
186 | row_shopfloor = cursor_system.fetchone() |
|
187 | elif shopfloor_uuid is not None: |
|
188 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
189 | " FROM tbl_shopfloors " |
|
190 | " WHERE uuid = %s ", (shopfloor_uuid,)) |
|
191 | row_shopfloor = cursor_system.fetchone() |
|
192 | ||
193 | if row_shopfloor is None: |
|
194 | if cursor_system: |
|
195 | cursor_system.close() |
|
196 | if cnx_system: |
|
197 | cnx_system.close() |
|
198 | ||
199 | if cursor_carbon: |
|
200 | cursor_carbon.close() |
|
201 | if cnx_carbon: |
|
202 | cnx_carbon.close() |
|
203 | ||
204 | if cursor_historical: |
|
205 | cursor_historical.close() |
|
206 | if cnx_historical: |
|
207 | cnx_historical.close() |
|
208 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.SHOPFLOOR_NOT_FOUND') |
|
209 | ||
210 | shopfloor = dict() |
|
211 | shopfloor['id'] = row_shopfloor[0] |
|
212 | shopfloor['name'] = row_shopfloor[1] |
|
213 | shopfloor['area'] = row_shopfloor[2] |
|
214 | shopfloor['cost_center_id'] = row_shopfloor[3] |
|
215 | ||
216 | ################################################################################################################ |
|
217 | # Step 3: query energy categories |
|
218 | ################################################################################################################ |
|
219 | energy_category_set = set() |
|
220 | # query energy categories in base period |
|
221 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
|
222 | " FROM tbl_shopfloor_input_category_hourly " |
|
223 | " WHERE shopfloor_id = %s " |
|
224 | " AND start_datetime_utc >= %s " |
|
225 | " AND start_datetime_utc < %s ", |
|
226 | (shopfloor['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
227 | rows_energy_categories = cursor_carbon.fetchall() |
|
228 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
229 | for row_energy_category in rows_energy_categories: |
|
230 | energy_category_set.add(row_energy_category[0]) |
|
231 | ||
232 | # query energy categories in reporting period |
|
233 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
|
234 | " FROM tbl_shopfloor_input_category_hourly " |
|
235 | " WHERE shopfloor_id = %s " |
|
236 | " AND start_datetime_utc >= %s " |
|
237 | " AND start_datetime_utc < %s ", |
|
238 | (shopfloor['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
239 | rows_energy_categories = cursor_carbon.fetchall() |
|
240 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
241 | for row_energy_category in rows_energy_categories: |
|
242 | energy_category_set.add(row_energy_category[0]) |
|
243 | ||
244 | # query all energy categories in base period and reporting period |
|
245 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
246 | " FROM tbl_energy_categories " |
|
247 | " ORDER BY id ", ) |
|
248 | rows_energy_categories = cursor_system.fetchall() |
|
249 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
250 | if cursor_system: |
|
251 | cursor_system.close() |
|
252 | if cnx_system: |
|
253 | cnx_system.close() |
|
254 | ||
255 | if cursor_carbon: |
|
256 | cursor_carbon.close() |
|
257 | if cnx_carbon: |
|
258 | cnx_carbon.close() |
|
259 | ||
260 | if cursor_historical: |
|
261 | cursor_historical.close() |
|
262 | if cnx_historical: |
|
263 | cnx_historical.close() |
|
264 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
265 | title='API.NOT_FOUND', |
|
266 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
267 | energy_category_dict = dict() |
|
268 | for row_energy_category in rows_energy_categories: |
|
269 | if row_energy_category[0] in energy_category_set: |
|
270 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
271 | "unit_of_measure": row_energy_category[2], |
|
272 | "kgce": row_energy_category[3], |
|
273 | "kgco2e": row_energy_category[4]} |
|
274 | ||
275 | ################################################################################################################ |
|
276 | # Step 4: query associated sensors |
|
277 | ################################################################################################################ |
|
278 | point_list = list() |
|
279 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
280 | " FROM tbl_shopfloors st, tbl_sensors se, tbl_shopfloors_sensors ss, " |
|
281 | " tbl_points p, tbl_sensors_points sp " |
|
282 | " WHERE st.id = %s AND st.id = ss.shopfloor_id AND ss.sensor_id = se.id " |
|
283 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
|
284 | " ORDER BY p.id ", (shopfloor['id'],)) |
|
285 | rows_points = cursor_system.fetchall() |
|
286 | if rows_points is not None and len(rows_points) > 0: |
|
287 | for row in rows_points: |
|
288 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
289 | ||
290 | ################################################################################################################ |
|
291 | # Step 5: query associated points |
|
292 | ################################################################################################################ |
|
293 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
294 | " FROM tbl_shopfloors s, tbl_shopfloors_points sp, tbl_points p " |
|
295 | " WHERE s.id = %s AND s.id = sp.shopfloor_id AND sp.point_id = p.id " |
|
296 | " ORDER BY p.id ", (shopfloor['id'],)) |
|
297 | rows_points = cursor_system.fetchall() |
|
298 | if rows_points is not None and len(rows_points) > 0: |
|
299 | for row in rows_points: |
|
300 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
301 | ||
302 | ################################################################################################################ |
|
303 | # Step 6: query base period energy carbon dioxide emissions |
|
304 | ################################################################################################################ |
|
305 | base = dict() |
|
306 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
307 | for energy_category_id in energy_category_set: |
|
308 | base[energy_category_id] = dict() |
|
309 | base[energy_category_id]['timestamps'] = list() |
|
310 | base[energy_category_id]['values'] = list() |
|
311 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
312 | ||
313 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
|
314 | " FROM tbl_shopfloor_input_category_hourly " |
|
315 | " WHERE shopfloor_id = %s " |
|
316 | " AND energy_category_id = %s " |
|
317 | " AND start_datetime_utc >= %s " |
|
318 | " AND start_datetime_utc < %s " |
|
319 | " ORDER BY start_datetime_utc ", |
|
320 | (shopfloor['id'], |
|
321 | energy_category_id, |
|
322 | base_start_datetime_utc, |
|
323 | base_end_datetime_utc)) |
|
324 | rows_shopfloor_hourly = cursor_carbon.fetchall() |
|
325 | ||
326 | rows_shopfloor_periodically = utilities.aggregate_hourly_data_by_period(rows_shopfloor_hourly, |
|
327 | base_start_datetime_utc, |
|
328 | base_end_datetime_utc, |
|
329 | period_type) |
|
330 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
|
331 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
332 | timedelta(minutes=timezone_offset) |
|
333 | if period_type == 'hourly': |
|
334 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
335 | elif period_type == 'daily': |
|
336 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
337 | elif period_type == 'weekly': |
|
338 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
339 | elif period_type == 'monthly': |
|
340 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
341 | elif period_type == 'yearly': |
|
342 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
343 | ||
344 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
|
345 | else row_shopfloor_periodically[1] |
|
346 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
347 | base[energy_category_id]['values'].append(actual_value) |
|
348 | base[energy_category_id]['subtotal'] += actual_value |
|
349 | ||
350 | ################################################################################################################ |
|
351 | # Step 7: query reporting period energy carbon dioxide emissions |
|
352 | ################################################################################################################ |
|
353 | reporting = dict() |
|
354 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
355 | for energy_category_id in energy_category_set: |
|
356 | reporting[energy_category_id] = dict() |
|
357 | reporting[energy_category_id]['timestamps'] = list() |
|
358 | reporting[energy_category_id]['values'] = list() |
|
359 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
360 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
|
361 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
|
362 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
|
363 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
|
364 | reporting[energy_category_id]['deep'] = Decimal(0.0) |
|
365 | ||
366 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
|
367 | " FROM tbl_shopfloor_input_category_hourly " |
|
368 | " WHERE shopfloor_id = %s " |
|
369 | " AND energy_category_id = %s " |
|
370 | " AND start_datetime_utc >= %s " |
|
371 | " AND start_datetime_utc < %s " |
|
372 | " ORDER BY start_datetime_utc ", |
|
373 | (shopfloor['id'], |
|
374 | energy_category_id, |
|
375 | reporting_start_datetime_utc, |
|
376 | reporting_end_datetime_utc)) |
|
377 | rows_shopfloor_hourly = cursor_carbon.fetchall() |
|
378 | ||
379 | rows_shopfloor_periodically = utilities.aggregate_hourly_data_by_period(rows_shopfloor_hourly, |
|
380 | reporting_start_datetime_utc, |
|
381 | reporting_end_datetime_utc, |
|
382 | period_type) |
|
383 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
|
384 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
385 | timedelta(minutes=timezone_offset) |
|
386 | if period_type == 'hourly': |
|
387 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
388 | elif period_type == 'daily': |
|
389 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
390 | elif period_type == 'weekly': |
|
391 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
392 | elif period_type == 'monthly': |
|
393 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
394 | elif period_type == 'yearly': |
|
395 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
396 | ||
397 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
|
398 | else row_shopfloor_periodically[1] |
|
399 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
400 | reporting[energy_category_id]['values'].append(actual_value) |
|
401 | reporting[energy_category_id]['subtotal'] += actual_value |
|
402 | ||
403 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(shopfloor['cost_center_id'], |
|
404 | energy_category_id, |
|
405 | reporting_start_datetime_utc, |
|
406 | reporting_end_datetime_utc) |
|
407 | for row in rows_shopfloor_hourly: |
|
408 | peak_type = energy_category_tariff_dict.get(row[0], None) |
|
409 | if peak_type == 'toppeak': |
|
410 | reporting[energy_category_id]['toppeak'] += row[1] |
|
411 | elif peak_type == 'onpeak': |
|
412 | reporting[energy_category_id]['onpeak'] += row[1] |
|
413 | elif peak_type == 'midpeak': |
|
414 | reporting[energy_category_id]['midpeak'] += row[1] |
|
415 | elif peak_type == 'offpeak': |
|
416 | reporting[energy_category_id]['offpeak'] += row[1] |
|
417 | elif peak_type == 'deep': |
|
418 | reporting[energy_category_id]['deep'] += row[1] |
|
419 | ||
420 | ################################################################################################################ |
|
421 | # Step 8: query tariff data |
|
422 | ################################################################################################################ |
|
423 | parameters_data = dict() |
|
424 | parameters_data['names'] = list() |
|
425 | parameters_data['timestamps'] = list() |
|
426 | parameters_data['values'] = list() |
|
427 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
428 | and not is_quick_mode: |
|
429 | for energy_category_id in energy_category_set: |
|
430 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(shopfloor['cost_center_id'], |
|
431 | energy_category_id, |
|
432 | reporting_start_datetime_utc, |
|
433 | reporting_end_datetime_utc) |
|
434 | tariff_timestamp_list = list() |
|
435 | tariff_value_list = list() |
|
436 | for k, v in energy_category_tariff_dict.items(): |
|
437 | # convert k from utc to local |
|
438 | k = k + timedelta(minutes=timezone_offset) |
|
439 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
440 | tariff_value_list.append(v) |
|
441 | ||
442 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
443 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
444 | parameters_data['values'].append(tariff_value_list) |
|
445 | ||
446 | ################################################################################################################ |
|
447 | # Step 9: query associated sensors and points data |
|
448 | ################################################################################################################ |
|
449 | if not is_quick_mode: |
|
450 | for point in point_list: |
|
451 | point_values = [] |
|
452 | point_timestamps = [] |
|
453 | if point['object_type'] == 'ENERGY_VALUE': |
|
454 | query = (" SELECT utc_date_time, actual_value " |
|
455 | " FROM tbl_energy_value " |
|
456 | " WHERE point_id = %s " |
|
457 | " AND utc_date_time BETWEEN %s AND %s " |
|
458 | " ORDER BY utc_date_time ") |
|
459 | cursor_historical.execute(query, (point['id'], |
|
460 | reporting_start_datetime_utc, |
|
461 | reporting_end_datetime_utc)) |
|
462 | rows = cursor_historical.fetchall() |
|
463 | ||
464 | if rows is not None and len(rows) > 0: |
|
465 | for row in rows: |
|
466 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
467 | timedelta(minutes=timezone_offset) |
|
468 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
469 | point_timestamps.append(current_datetime) |
|
470 | point_values.append(row[1]) |
|
471 | elif point['object_type'] == 'ANALOG_VALUE': |
|
472 | query = (" SELECT utc_date_time, actual_value " |
|
473 | " FROM tbl_analog_value " |
|
474 | " WHERE point_id = %s " |
|
475 | " AND utc_date_time BETWEEN %s AND %s " |
|
476 | " ORDER BY utc_date_time ") |
|
477 | cursor_historical.execute(query, (point['id'], |
|
478 | reporting_start_datetime_utc, |
|
479 | reporting_end_datetime_utc)) |
|
480 | rows = cursor_historical.fetchall() |
|
481 | ||
482 | if rows is not None and len(rows) > 0: |
|
483 | for row in rows: |
|
484 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
485 | timedelta(minutes=timezone_offset) |
|
486 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
487 | point_timestamps.append(current_datetime) |
|
488 | point_values.append(row[1]) |
|
489 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
490 | query = (" SELECT utc_date_time, actual_value " |
|
491 | " FROM tbl_digital_value " |
|
492 | " WHERE point_id = %s " |
|
493 | " AND utc_date_time BETWEEN %s AND %s " |
|
494 | " ORDER BY utc_date_time ") |
|
495 | cursor_historical.execute(query, (point['id'], |
|
496 | reporting_start_datetime_utc, |
|
497 | reporting_end_datetime_utc)) |
|
498 | rows = cursor_historical.fetchall() |
|
499 | ||
500 | if rows is not None and len(rows) > 0: |
|
501 | for row in rows: |
|
502 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
503 | timedelta(minutes=timezone_offset) |
|
504 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
505 | point_timestamps.append(current_datetime) |
|
506 | point_values.append(row[1]) |
|
507 | ||
508 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
509 | parameters_data['timestamps'].append(point_timestamps) |
|
510 | parameters_data['values'].append(point_values) |
|
511 | ||
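The three branches in Step 9 run the same parameterized query against `tbl_energy_value`, `tbl_analog_value` and `tbl_digital_value`, differing only in the table name. A sketch of how the table could be picked from a fixed whitelist before executing one shared query; the mapping and the `build_point_query` helper are illustrative, not existing code in this handler.

    # Table names come from a fixed whitelist, never from user input,
    # so concatenating them into the SQL text cannot introduce injection.
    TABLE_BY_OBJECT_TYPE = {
        'ENERGY_VALUE': 'tbl_energy_value',
        'ANALOG_VALUE': 'tbl_analog_value',
        'DIGITAL_VALUE': 'tbl_digital_value',
    }

    def build_point_query(object_type):
        table = TABLE_BY_OBJECT_TYPE[object_type]
        return (" SELECT utc_date_time, actual_value "
                " FROM " + table +
                " WHERE point_id = %s "
                " AND utc_date_time BETWEEN %s AND %s "
                " ORDER BY utc_date_time ")

    print(build_point_query('ANALOG_VALUE'))

The point id and the two datetimes would still be bound as query parameters, exactly as in the branches above.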
512 | ################################################################################################################ |
|
513 | # Step 10: construct the report |
|
514 | ################################################################################################################ |
|
515 | if cursor_system: |
|
516 | cursor_system.close() |
|
517 | if cnx_system: |
|
518 | cnx_system.close() |
|
519 | ||
520 | if cursor_carbon: |
|
521 | cursor_carbon.close() |
|
522 | if cnx_carbon: |
|
523 | cnx_carbon.close() |
|
524 | ||
525 | if cursor_historical: |
|
526 | cursor_historical.close() |
|
527 | if cnx_historical: |
|
528 | cnx_historical.close() |
|
529 | ||
530 | result = dict() |
|
531 | ||
532 | result['shopfloor'] = dict() |
|
533 | result['shopfloor']['name'] = shopfloor['name'] |
|
534 | result['shopfloor']['area'] = shopfloor['area'] |
|
535 | ||
536 | result['base_period'] = dict() |
|
537 | result['base_period']['names'] = list() |
|
538 | result['base_period']['units'] = list() |
|
539 | result['base_period']['timestamps'] = list() |
|
540 | result['base_period']['values'] = list() |
|
541 | result['base_period']['subtotals'] = list() |
|
542 | result['base_period']['total'] = Decimal(0.0) |
|
543 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
544 | for energy_category_id in energy_category_set: |
|
545 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
546 | result['base_period']['units'].append('KG') |
|
547 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
548 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
549 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
550 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
|
551 | ||
552 | result['reporting_period'] = dict() |
|
553 | result['reporting_period']['names'] = list() |
|
554 | result['reporting_period']['energy_category_ids'] = list() |
|
555 | result['reporting_period']['units'] = list() |
|
556 | result['reporting_period']['timestamps'] = list() |
|
557 | result['reporting_period']['values'] = list() |
|
558 | result['reporting_period']['rates'] = list() |
|
559 | result['reporting_period']['subtotals'] = list() |
|
560 | result['reporting_period']['subtotals_per_unit_area'] = list() |
|
561 | result['reporting_period']['toppeaks'] = list() |
|
562 | result['reporting_period']['onpeaks'] = list() |
|
563 | result['reporting_period']['midpeaks'] = list() |
|
564 | result['reporting_period']['offpeaks'] = list() |
|
565 | result['reporting_period']['deeps'] = list() |
|
566 | result['reporting_period']['increment_rates'] = list() |
|
567 | result['reporting_period']['total'] = Decimal(0.0) |
|
568 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
|
569 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
|
570 | result['reporting_period']['total_unit'] = 'KG' |
|
571 | ||
572 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
573 | for energy_category_id in energy_category_set: |
|
574 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
575 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
576 | result['reporting_period']['units'].append('KG') |
|
577 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
578 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
579 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
580 | result['reporting_period']['subtotals_per_unit_area'].append( |
|
581 | reporting[energy_category_id]['subtotal'] / shopfloor['area'] if shopfloor['area'] > 0.0 else None) |
|
582 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
|
583 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
|
584 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
|
585 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
|
586 | result['reporting_period']['deeps'].append(reporting[energy_category_id]['deep']) |
|
587 | result['reporting_period']['increment_rates'].append( |
|
588 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
|
589 | base[energy_category_id]['subtotal'] |
|
590 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
|
591 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
|
592 | ||
593 | rate = list() |
|
594 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
595 | if index < len(base[energy_category_id]['values']) \ |
|
596 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
597 | rate.append((value - base[energy_category_id]['values'][index]) |
|
598 | / base[energy_category_id]['values'][index]) |
|
599 | else: |
|
600 | rate.append(None) |
|
601 | result['reporting_period']['rates'].append(rate) |
|
602 | ||
603 | result['reporting_period']['total_per_unit_area'] = \ |
|
604 | result['reporting_period']['total'] / shopfloor['area'] if shopfloor['area'] > 0.0 else None |
|
605 | ||
606 | result['reporting_period']['total_increment_rate'] = \ |
|
607 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
|
608 | result['base_period']['total'] \ |
|
609 | if result['base_period']['total'] > Decimal(0.0) else None |
|
610 | ||
611 | result['parameters'] = { |
|
612 | "names": parameters_data['names'], |
|
613 | "timestamps": parameters_data['timestamps'], |
|
614 | "values": parameters_data['values'] |
|
615 | } |
|
616 | # export result to an Excel file and then encode the file as a base64 string |
|
617 | result['excel_bytes_base64'] = None |
|
618 | if not is_quick_mode: |
|
619 | result['excel_bytes_base64'] = excelexporters.shopfloorcarbon.export(result, |
|
620 | shopfloor['name'], |
|
621 | base_period_start_datetime_local, |
|
622 | base_period_end_datetime_local, |
|
623 | reporting_period_start_datetime_local, |
|
624 | reporting_period_end_datetime_local, |
|
625 | period_type, |
|
626 | language) |
|
627 | resp.text = json.dumps(result) |
|
628 |
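Step 10 reports a per-period rate of change only when both the base value and the reporting value are non-zero; otherwise the slot is `None`. A worked sketch of that rule with hypothetical values:

    from decimal import Decimal

    base_values = [Decimal('10'), Decimal('0'), Decimal('8')]
    reporting_values = [Decimal('12'), Decimal('5'), Decimal('6')]

    rates = []
    for index, value in enumerate(reporting_values):
        if index < len(base_values) and base_values[index] != 0 and value != 0:
            rates.append((value - base_values[index]) / base_values[index])
        else:
            rates.append(None)

    print(rates)  # [Decimal('0.2'), None, Decimal('-0.25')]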
@@ 13-626 (lines=614) @@ | ||
10 | from core.useractivity import access_control, api_key_control |
|
11 | ||
12 | ||
13 | class Reporting: |
|
14 | def __init__(self): |
|
15 | """Initializes Reporting""" |
|
16 | pass |
|
17 | ||
18 | @staticmethod |
|
19 | def on_options(req, resp): |
|
20 | _ = req |
|
21 | resp.status = falcon.HTTP_200 |
|
22 | ||
23 | #################################################################################################################### |
|
24 | # PROCEDURES |
|
25 | # Step 1: valid parameters |
|
26 | # Step 2: query the tenant |
|
27 | # Step 3: query energy categories |
|
28 | # Step 4: query associated sensors |
|
29 | # Step 5: query associated points |
|
30 | # Step 6: query base period energy carbon dioxide emissions |
|
31 | # Step 7: query reporting period energy carbon dioxide emissions |
|
32 | # Step 8: query tariff data |
|
33 | # Step 9: query associated sensors and points data |
|
34 | # Step 10: construct the report |
|
35 | #################################################################################################################### |
|
36 | @staticmethod |
|
37 | def on_get(req, resp): |
|
38 | if 'API-KEY' not in req.headers or \ |
|
39 | not isinstance(req.headers['API-KEY'], str) or \ |
|
40 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
41 | access_control(req) |
|
42 | else: |
|
43 | api_key_control(req) |
|
44 | print(req.params) |
|
45 | tenant_id = req.params.get('tenantid') |
|
46 | tenant_uuid = req.params.get('tenantuuid') |
|
47 | period_type = req.params.get('periodtype') |
|
48 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
49 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
50 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
51 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
52 | language = req.params.get('language') |
|
53 | quick_mode = req.params.get('quickmode') |
|
54 | ||
55 | ################################################################################################################ |
|
56 | # Step 1: valid parameters |
|
57 | ################################################################################################################ |
|
58 | if tenant_id is None and tenant_uuid is None: |
|
59 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
60 | title='API.BAD_REQUEST', |
|
61 | description='API.INVALID_TENANT_ID') |
|
62 | ||
63 | if tenant_id is not None: |
|
64 | tenant_id = str.strip(tenant_id) |
|
65 | if not tenant_id.isdigit() or int(tenant_id) <= 0: |
|
66 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
67 | title='API.BAD_REQUEST', |
|
68 | description='API.INVALID_TENANT_ID') |
|
69 | ||
70 | if tenant_uuid is not None: |
|
71 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
72 | match = regex.match(str.strip(tenant_uuid)) |
|
73 | if not bool(match): |
|
74 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
75 | title='API.BAD_REQUEST', |
|
76 | description='API.INVALID_TENANT_UUID') |
|
77 | ||
78 | if period_type is None: |
|
79 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
80 | description='API.INVALID_PERIOD_TYPE') |
|
81 | else: |
|
82 | period_type = str.strip(period_type) |
|
83 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
84 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
85 | description='API.INVALID_PERIOD_TYPE') |
|
86 | ||
87 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
88 | if config.utc_offset[0] == '-': |
|
89 | timezone_offset = -timezone_offset |
|
90 | ||
91 | base_start_datetime_utc = None |
|
92 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
93 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
94 | try: |
|
95 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
96 | except ValueError: |
|
97 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
98 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
99 | base_start_datetime_utc = \ |
|
100 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
101 | # normalize the start datetime |
|
102 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
103 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
104 | else: |
|
105 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
106 | ||
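The normalization above snaps the base period start to the top of the hour, or to the half hour when the system aggregates in 30-minute slots. A minimal sketch, assuming `config.minutes_to_count` is 30 and using a hypothetical input:

    from datetime import datetime

    minutes_to_count = 30  # assumed value of config.minutes_to_count
    start = datetime(2023, 7, 1, 10, 47, 12, 345678)

    if minutes_to_count == 30 and start.minute >= 30:
        start = start.replace(minute=30, second=0, microsecond=0)
    else:
        start = start.replace(minute=0, second=0, microsecond=0)

    print(start)  # 2023-07-01 10:30:00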
107 | base_end_datetime_utc = None |
|
108 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
109 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
110 | try: |
|
111 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
112 | except ValueError: |
|
113 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
114 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
115 | base_end_datetime_utc = \ |
|
116 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
117 | ||
118 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
119 | base_start_datetime_utc >= base_end_datetime_utc: |
|
120 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
121 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
122 | ||
123 | if reporting_period_start_datetime_local is None: |
|
124 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
125 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
126 | else: |
|
127 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
128 | try: |
|
129 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
130 | '%Y-%m-%dT%H:%M:%S') |
|
131 | except ValueError: |
|
132 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
133 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
134 | reporting_start_datetime_utc = \ |
|
135 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
136 | # normalize the start datetime |
|
137 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
138 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
139 | else: |
|
140 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
141 | ||
142 | if reporting_period_end_datetime_local is None: |
|
143 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
144 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
145 | else: |
|
146 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
147 | try: |
|
148 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
149 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
150 | timedelta(minutes=timezone_offset) |
|
151 | except ValueError: |
|
152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
153 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
154 | ||
155 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
157 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
158 | ||
159 | # if quick mode is turned on, do not return parameters data or the excel file |
|
160 | is_quick_mode = False |
|
161 | if quick_mode is not None and \ |
|
162 | len(str.strip(quick_mode)) > 0 and \ |
|
163 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
164 | is_quick_mode = True |
|
165 | ||
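The quick-mode flag accepts several truthy spellings of the `quickmode` query parameter; anything else, including a missing parameter, leaves it off. A self-contained sketch of the same check (the helper name is ours, not part of the handler):

    def parse_quick_mode(quick_mode):
        # 'true', 't', 'on', 'yes' and 'y' count as on, case-insensitively
        # and ignoring surrounding whitespace.
        return (quick_mode is not None
                and len(quick_mode.strip()) > 0
                and quick_mode.strip().lower() in ('true', 't', 'on', 'yes', 'y'))

    print(parse_quick_mode(' YES '))  # True
    print(parse_quick_mode('false'))  # False
    print(parse_quick_mode(None))     # False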
166 | trans = utilities.get_translation(language) |
|
167 | trans.install() |
|
168 | _ = trans.gettext |
|
169 | ||
170 | ################################################################################################################ |
|
171 | # Step 2: query the tenant |
|
172 | ################################################################################################################ |
|
173 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
174 | cursor_system = cnx_system.cursor() |
|
175 | ||
176 | cnx_carbon = mysql.connector.connect(**config.myems_carbon_db) |
|
177 | cursor_carbon = cnx_carbon.cursor() |
|
178 | ||
179 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
180 | cursor_historical = cnx_historical.cursor() |
|
181 | ||
182 | if tenant_id is not None: |
|
183 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
184 | " FROM tbl_tenants " |
|
185 | " WHERE id = %s ", (tenant_id,)) |
|
186 | row_tenant = cursor_system.fetchone() |
|
187 | elif tenant_uuid is not None: |
|
188 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
189 | " FROM tbl_tenants " |
|
190 | " WHERE uuid = %s ", (tenant_uuid,)) |
|
191 | row_tenant = cursor_system.fetchone() |
|
192 | ||
193 | if row_tenant is None: |
|
194 | if cursor_system: |
|
195 | cursor_system.close() |
|
196 | if cnx_system: |
|
197 | cnx_system.close() |
|
198 | ||
199 | if cursor_carbon: |
|
200 | cursor_carbon.close() |
|
201 | if cnx_carbon: |
|
202 | cnx_carbon.close() |
|
203 | ||
204 | if cursor_historical: |
|
205 | cursor_historical.close() |
|
206 | if cnx_historical: |
|
207 | cnx_historical.close() |
|
208 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.TENANT_NOT_FOUND') |
|
209 | ||
210 | tenant = dict() |
|
211 | tenant['id'] = row_tenant[0] |
|
212 | tenant['name'] = row_tenant[1] |
|
213 | tenant['area'] = row_tenant[2] |
|
214 | tenant['cost_center_id'] = row_tenant[3] |
|
215 | ||
216 | ################################################################################################################ |
|
217 | # Step 3: query energy categories |
|
218 | ################################################################################################################ |
|
219 | energy_category_set = set() |
|
220 | # query energy categories in base period |
|
221 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
|
222 | " FROM tbl_tenant_input_category_hourly " |
|
223 | " WHERE tenant_id = %s " |
|
224 | " AND start_datetime_utc >= %s " |
|
225 | " AND start_datetime_utc < %s ", |
|
226 | (tenant['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
227 | rows_energy_categories = cursor_carbon.fetchall() |
|
228 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
229 | for row_energy_category in rows_energy_categories: |
|
230 | energy_category_set.add(row_energy_category[0]) |
|
231 | ||
232 | # query energy categories in reporting period |
|
233 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
|
234 | " FROM tbl_tenant_input_category_hourly " |
|
235 | " WHERE tenant_id = %s " |
|
236 | " AND start_datetime_utc >= %s " |
|
237 | " AND start_datetime_utc < %s ", |
|
238 | (tenant['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
239 | rows_energy_categories = cursor_carbon.fetchall() |
|
240 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
241 | for row_energy_category in rows_energy_categories: |
|
242 | energy_category_set.add(row_energy_category[0]) |
|
243 | ||
244 | # query all energy categories in base period and reporting period |
|
245 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
246 | " FROM tbl_energy_categories " |
|
247 | " ORDER BY id ", ) |
|
248 | rows_energy_categories = cursor_system.fetchall() |
|
249 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
250 | if cursor_system: |
|
251 | cursor_system.close() |
|
252 | if cnx_system: |
|
253 | cnx_system.close() |
|
254 | ||
255 | if cursor_carbon: |
|
256 | cursor_carbon.close() |
|
257 | if cnx_carbon: |
|
258 | cnx_carbon.close() |
|
259 | ||
260 | if cursor_historical: |
|
261 | cursor_historical.close() |
|
262 | if cnx_historical: |
|
263 | cnx_historical.close() |
|
264 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
265 | title='API.NOT_FOUND', |
|
266 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
267 | energy_category_dict = dict() |
|
268 | for row_energy_category in rows_energy_categories: |
|
269 | if row_energy_category[0] in energy_category_set: |
|
270 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
271 | "unit_of_measure": row_energy_category[2], |
|
272 | "kgce": row_energy_category[3], |
|
273 | "kgco2e": row_energy_category[4]} |
|
274 | ||
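Step 3 keeps only the energy categories that actually appear in the base or reporting period, keyed by id. A sketch with hypothetical rows shaped like the `tbl_energy_categories` result (the names and factors below are made up):

    energy_category_set = {1, 3}
    rows_energy_categories = [
        (1, 'Electricity', 'kWh', '0.1229', '0.8825'),
        (2, 'Water', 'm3', '0.0857', '0.9100'),
        (3, 'Natural Gas', 'm3', '1.3300', '2.1622'),
    ]

    energy_category_dict = {}
    for row in rows_energy_categories:
        if row[0] in energy_category_set:
            energy_category_dict[row[0]] = {"name": row[1],
                                            "unit_of_measure": row[2],
                                            "kgce": row[3],
                                            "kgco2e": row[4]}

    print(sorted(energy_category_dict))  # [1, 3]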
275 | ################################################################################################################ |
|
276 | # Step 4: query associated sensors |
|
277 | ################################################################################################################ |
|
278 | point_list = list() |
|
279 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
280 | " FROM tbl_tenants t, tbl_sensors s, tbl_tenants_sensors ts, " |
|
281 | " tbl_points p, tbl_sensors_points sp " |
|
282 | " WHERE t.id = %s AND t.id = ts.tenant_id AND ts.sensor_id = s.id " |
|
283 | " AND s.id = sp.sensor_id AND sp.point_id = p.id " |
|
284 | " ORDER BY p.id ", (tenant['id'],)) |
|
285 | rows_points = cursor_system.fetchall() |
|
286 | if rows_points is not None and len(rows_points) > 0: |
|
287 | for row in rows_points: |
|
288 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
289 | ||
290 | ################################################################################################################ |
|
291 | # Step 5: query associated points |
|
292 | ################################################################################################################ |
|
293 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
294 | " FROM tbl_tenants t, tbl_tenants_points tp, tbl_points p " |
|
295 | " WHERE t.id = %s AND t.id = tp.tenant_id AND tp.point_id = p.id " |
|
296 | " ORDER BY p.id ", (tenant['id'],)) |
|
297 | rows_points = cursor_system.fetchall() |
|
298 | if rows_points is not None and len(rows_points) > 0: |
|
299 | for row in rows_points: |
|
300 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
301 | ||
302 | ################################################################################################################ |
|
303 | # Step 6: query base period energy carbon dioxide emissions |
|
304 | ################################################################################################################ |
|
305 | base = dict() |
|
306 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
307 | for energy_category_id in energy_category_set: |
|
308 | base[energy_category_id] = dict() |
|
309 | base[energy_category_id]['timestamps'] = list() |
|
310 | base[energy_category_id]['values'] = list() |
|
311 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
312 | ||
313 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
|
314 | " FROM tbl_tenant_input_category_hourly " |
|
315 | " WHERE tenant_id = %s " |
|
316 | " AND energy_category_id = %s " |
|
317 | " AND start_datetime_utc >= %s " |
|
318 | " AND start_datetime_utc < %s " |
|
319 | " ORDER BY start_datetime_utc ", |
|
320 | (tenant['id'], |
|
321 | energy_category_id, |
|
322 | base_start_datetime_utc, |
|
323 | base_end_datetime_utc)) |
|
324 | rows_tenant_hourly = cursor_carbon.fetchall() |
|
325 | ||
326 | rows_tenant_periodically = utilities.aggregate_hourly_data_by_period(rows_tenant_hourly, |
|
327 | base_start_datetime_utc, |
|
328 | base_end_datetime_utc, |
|
329 | period_type) |
|
330 | for row_tenant_periodically in rows_tenant_periodically: |
|
331 | current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
332 | timedelta(minutes=timezone_offset) |
|
333 | if period_type == 'hourly': |
|
334 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
335 | elif period_type == 'daily': |
|
336 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
337 | elif period_type == 'weekly': |
|
338 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
339 | elif period_type == 'monthly': |
|
340 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
341 | elif period_type == 'yearly': |
|
342 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
343 | ||
344 | actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1] |
|
345 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
346 | base[energy_category_id]['values'].append(actual_value) |
|
347 | base[energy_category_id]['subtotal'] += actual_value |
|
348 | ||
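In Steps 6 and 7 each aggregated row's UTC start time is shifted to local time and then truncated to a width that matches the period type: 19 characters for hourly, 10 for daily and weekly, 7 for monthly, 4 for yearly. A sketch assuming a +08:00 offset and a monthly period (both hypothetical):

    from datetime import datetime, timezone, timedelta

    timezone_offset = 480  # assumed +08:00 offset, in minutes
    period_type = 'monthly'
    row_start_utc = datetime(2023, 6, 30, 16, 0, 0)

    local_dt = row_start_utc.replace(tzinfo=timezone.utc) + timedelta(minutes=timezone_offset)
    if period_type == 'hourly':
        label = local_dt.isoformat()[0:19]
    elif period_type in ('daily', 'weekly'):
        label = local_dt.isoformat()[0:10]
    elif period_type == 'monthly':
        label = local_dt.isoformat()[0:7]
    else:  # 'yearly'
        label = local_dt.isoformat()[0:4]

    print(label)  # '2023-07'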
349 | ################################################################################################################ |
|
350 | # Step 7: query reporting period energy carbon dioxide emissions |
|
351 | ################################################################################################################ |
|
352 | reporting = dict() |
|
353 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
354 | for energy_category_id in energy_category_set: |
|
355 | reporting[energy_category_id] = dict() |
|
356 | reporting[energy_category_id]['timestamps'] = list() |
|
357 | reporting[energy_category_id]['values'] = list() |
|
358 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
359 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
|
360 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
|
361 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
|
362 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
|
363 | reporting[energy_category_id]['deep'] = Decimal(0.0) |
|
364 | ||
365 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
|
366 | " FROM tbl_tenant_input_category_hourly " |
|
367 | " WHERE tenant_id = %s " |
|
368 | " AND energy_category_id = %s " |
|
369 | " AND start_datetime_utc >= %s " |
|
370 | " AND start_datetime_utc < %s " |
|
371 | " ORDER BY start_datetime_utc ", |
|
372 | (tenant['id'], |
|
373 | energy_category_id, |
|
374 | reporting_start_datetime_utc, |
|
375 | reporting_end_datetime_utc)) |
|
376 | rows_tenant_hourly = cursor_carbon.fetchall() |
|
377 | ||
378 | rows_tenant_periodically = utilities.aggregate_hourly_data_by_period(rows_tenant_hourly, |
|
379 | reporting_start_datetime_utc, |
|
380 | reporting_end_datetime_utc, |
|
381 | period_type) |
|
382 | for row_tenant_periodically in rows_tenant_periodically: |
|
383 | current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
384 | timedelta(minutes=timezone_offset) |
|
385 | if period_type == 'hourly': |
|
386 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
387 | elif period_type == 'daily': |
|
388 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
389 | elif period_type == 'weekly': |
|
390 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
391 | elif period_type == 'monthly': |
|
392 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
393 | elif period_type == 'yearly': |
|
394 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
395 | ||
396 | actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1] |
|
397 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
398 | reporting[energy_category_id]['values'].append(actual_value) |
|
399 | reporting[energy_category_id]['subtotal'] += actual_value |
|
400 | ||
401 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(tenant['cost_center_id'], |
|
402 | energy_category_id, |
|
403 | reporting_start_datetime_utc, |
|
404 | reporting_end_datetime_utc) |
|
405 | for row in rows_tenant_hourly: |
|
406 | peak_type = energy_category_tariff_dict.get(row[0], None) |
|
407 | if peak_type == 'toppeak': |
|
408 | reporting[energy_category_id]['toppeak'] += row[1] |
|
409 | elif peak_type == 'onpeak': |
|
410 | reporting[energy_category_id]['onpeak'] += row[1] |
|
411 | elif peak_type == 'midpeak': |
|
412 | reporting[energy_category_id]['midpeak'] += row[1] |
|
413 | elif peak_type == 'offpeak': |
|
414 | reporting[energy_category_id]['offpeak'] += row[1] |
|
415 | elif peak_type == 'deep': |
|
416 | reporting[energy_category_id]['deep'] += row[1] |
|
417 | ||
418 | ################################################################################################################ |
|
419 | # Step 8: query tariff data |
|
420 | ################################################################################################################ |
|
421 | parameters_data = dict() |
|
422 | parameters_data['names'] = list() |
|
423 | parameters_data['timestamps'] = list() |
|
424 | parameters_data['values'] = list() |
|
425 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
426 | and not is_quick_mode: |
|
427 | for energy_category_id in energy_category_set: |
|
428 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(tenant['cost_center_id'], |
|
429 | energy_category_id, |
|
430 | reporting_start_datetime_utc, |
|
431 | reporting_end_datetime_utc) |
|
432 | tariff_timestamp_list = list() |
|
433 | tariff_value_list = list() |
|
434 | for k, v in energy_category_tariff_dict.items(): |
|
435 | # convert k from utc to local |
|
436 | k = k + timedelta(minutes=timezone_offset) |
|
437 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
438 | tariff_value_list.append(v) |
|
439 | ||
440 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
441 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
442 | parameters_data['values'].append(tariff_value_list) |
|
443 | ||
444 | ################################################################################################################ |
|
445 | # Step 9: query associated sensors and points data |
|
446 | ################################################################################################################ |
|
447 | if not is_quick_mode: |
|
448 | for point in point_list: |
|
449 | point_values = [] |
|
450 | point_timestamps = [] |
|
451 | if point['object_type'] == 'ENERGY_VALUE': |
|
452 | query = (" SELECT utc_date_time, actual_value " |
|
453 | " FROM tbl_energy_value " |
|
454 | " WHERE point_id = %s " |
|
455 | " AND utc_date_time BETWEEN %s AND %s " |
|
456 | " ORDER BY utc_date_time ") |
|
457 | cursor_historical.execute(query, (point['id'], |
|
458 | reporting_start_datetime_utc, |
|
459 | reporting_end_datetime_utc)) |
|
460 | rows = cursor_historical.fetchall() |
|
461 | ||
462 | if rows is not None and len(rows) > 0: |
|
463 | for row in rows: |
|
464 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
465 | timedelta(minutes=timezone_offset) |
|
466 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
467 | point_timestamps.append(current_datetime) |
|
468 | point_values.append(row[1]) |
|
469 | elif point['object_type'] == 'ANALOG_VALUE': |
|
470 | query = (" SELECT utc_date_time, actual_value " |
|
471 | " FROM tbl_analog_value " |
|
472 | " WHERE point_id = %s " |
|
473 | " AND utc_date_time BETWEEN %s AND %s " |
|
474 | " ORDER BY utc_date_time ") |
|
475 | cursor_historical.execute(query, (point['id'], |
|
476 | reporting_start_datetime_utc, |
|
477 | reporting_end_datetime_utc)) |
|
478 | rows = cursor_historical.fetchall() |
|
479 | ||
480 | if rows is not None and len(rows) > 0: |
|
481 | for row in rows: |
|
482 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
483 | timedelta(minutes=timezone_offset) |
|
484 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
485 | point_timestamps.append(current_datetime) |
|
486 | point_values.append(row[1]) |
|
487 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
488 | query = (" SELECT utc_date_time, actual_value " |
|
489 | " FROM tbl_digital_value " |
|
490 | " WHERE point_id = %s " |
|
491 | " AND utc_date_time BETWEEN %s AND %s " |
|
492 | " ORDER BY utc_date_time ") |
|
493 | cursor_historical.execute(query, (point['id'], |
|
494 | reporting_start_datetime_utc, |
|
495 | reporting_end_datetime_utc)) |
|
496 | rows = cursor_historical.fetchall() |
|
497 | ||
498 | if rows is not None and len(rows) > 0: |
|
499 | for row in rows: |
|
500 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
501 | timedelta(minutes=timezone_offset) |
|
502 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
503 | point_timestamps.append(current_datetime) |
|
504 | point_values.append(row[1]) |
|
505 | ||
506 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
507 | parameters_data['timestamps'].append(point_timestamps) |
|
508 | parameters_data['values'].append(point_values) |
|
509 | ||
510 | ################################################################################################################ |
|
511 | # Step 10: construct the report |
|
512 | ################################################################################################################ |
|
513 | if cursor_system: |
|
514 | cursor_system.close() |
|
515 | if cnx_system: |
|
516 | cnx_system.close() |
|
517 | ||
518 | if cursor_carbon: |
|
519 | cursor_carbon.close() |
|
520 | if cnx_carbon: |
|
521 | cnx_carbon.close() |
|
522 | ||
523 | if cursor_historical: |
|
524 | cursor_historical.close() |
|
525 | if cnx_historical: |
|
526 | cnx_historical.close() |
|
527 | ||
528 | result = dict() |
|
529 | ||
530 | result['tenant'] = dict() |
|
531 | result['tenant']['name'] = tenant['name'] |
|
532 | result['tenant']['area'] = tenant['area'] |
|
533 | ||
534 | result['base_period'] = dict() |
|
535 | result['base_period']['names'] = list() |
|
536 | result['base_period']['units'] = list() |
|
537 | result['base_period']['timestamps'] = list() |
|
538 | result['base_period']['values'] = list() |
|
539 | result['base_period']['subtotals'] = list() |
|
540 | result['base_period']['total'] = Decimal(0.0) |
|
541 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
542 | for energy_category_id in energy_category_set: |
|
543 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
544 | result['base_period']['units'].append('KG') |
|
545 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
546 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
547 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
548 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
|
549 | ||
550 | result['reporting_period'] = dict() |
|
551 | result['reporting_period']['names'] = list() |
|
552 | result['reporting_period']['energy_category_ids'] = list() |
|
553 | result['reporting_period']['units'] = list() |
|
554 | result['reporting_period']['timestamps'] = list() |
|
555 | result['reporting_period']['values'] = list() |
|
556 | result['reporting_period']['rates'] = list() |
|
557 | result['reporting_period']['subtotals'] = list() |
|
558 | result['reporting_period']['subtotals_per_unit_area'] = list() |
|
559 | result['reporting_period']['toppeaks'] = list() |
|
560 | result['reporting_period']['onpeaks'] = list() |
|
561 | result['reporting_period']['midpeaks'] = list() |
|
562 | result['reporting_period']['offpeaks'] = list() |
|
563 | result['reporting_period']['deeps'] = list() |
|
564 | result['reporting_period']['increment_rates'] = list() |
|
565 | result['reporting_period']['total'] = Decimal(0.0) |
|
566 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
|
567 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
|
568 | result['reporting_period']['total_unit'] = 'KG' |
|
569 | ||
570 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
571 | for energy_category_id in energy_category_set: |
|
572 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
573 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
574 | result['reporting_period']['units'].append('KG') |
|
575 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
576 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
577 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
578 | result['reporting_period']['subtotals_per_unit_area'].append( |
|
579 | reporting[energy_category_id]['subtotal'] / tenant['area'] if tenant['area'] > 0.0 else None) |
|
580 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
|
581 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
|
582 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
|
583 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
|
584 | result['reporting_period']['deeps'].append(reporting[energy_category_id]['deep']) |
|
585 | result['reporting_period']['increment_rates'].append( |
|
586 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
|
587 | base[energy_category_id]['subtotal'] |
|
588 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
|
589 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
|
590 | ||
591 | rate = list() |
|
592 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
593 | if index < len(base[energy_category_id]['values']) \ |
|
594 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
595 | rate.append((value - base[energy_category_id]['values'][index]) |
|
596 | / base[energy_category_id]['values'][index]) |
|
597 | else: |
|
598 | rate.append(None) |
|
599 | result['reporting_period']['rates'].append(rate) |
|
600 | ||
601 | result['reporting_period']['total_per_unit_area'] = \ |
|
602 | result['reporting_period']['total'] / tenant['area'] if tenant['area'] > 0.0 else None |
|
603 | ||
604 | result['reporting_period']['total_increment_rate'] = \ |
|
605 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
|
606 | result['base_period']['total'] \ |
|
607 | if result['base_period']['total'] > Decimal(0.0) else None |
|
608 | ||
609 | result['parameters'] = { |
|
610 | "names": parameters_data['names'], |
|
611 | "timestamps": parameters_data['timestamps'], |
|
612 | "values": parameters_data['values'] |
|
613 | } |
|
614 | ||
615 | # export result to an Excel file and then encode the file as a base64 string |
|
616 | if not is_quick_mode: |
|
617 | result['excel_bytes_base64'] = excelexporters.tenantcarbon.export(result, |
|
618 | tenant['name'], |
|
619 | base_period_start_datetime_local, |
|
620 | base_period_end_datetime_local, |
|
621 | reporting_period_start_datetime_local, |
|
622 | reporting_period_end_datetime_local, |
|
623 | period_type, |
|
624 | language) |
|
625 | ||
626 | resp.text = json.dumps(result) |
|
627 |
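At the end of Step 10 the handler derives two headline figures: the total per unit area (only when the area is positive) and the overall increment rate against the base period (only when the base total is positive). A worked sketch with hypothetical numbers:

    from decimal import Decimal

    reporting_total = Decimal('1250.0')
    base_total = Decimal('1000.0')
    area = Decimal('500.0')

    total_per_unit_area = reporting_total / area if area > 0 else None
    total_increment_rate = ((reporting_total - base_total) / base_total
                            if base_total > Decimal(0) else None)

    print(total_per_unit_area)   # 2.5
    print(total_increment_rate)  # 0.25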
@@ 13-626 (lines=614) @@ | ||
10 | from core.useractivity import access_control, api_key_control |
|
11 | ||
12 | ||
13 | class Reporting: |
|
14 | def __init__(self): |
|
15 | """Initializes Reporting""" |
|
16 | pass |
|
17 | ||
18 | @staticmethod |
|
19 | def on_options(req, resp): |
|
20 | _ = req |
|
21 | resp.status = falcon.HTTP_200 |
|
22 | ||
23 | #################################################################################################################### |
|
24 | # PROCEDURES |
|
25 | # Step 1: valid parameters |
|
26 | # Step 2: query the tenant |
|
27 | # Step 3: query energy categories |
|
28 | # Step 4: query associated sensors |
|
29 | # Step 5: query associated points |
|
30 | # Step 6: query base period energy cost |
|
31 | # Step 7: query reporting period energy cost |
|
32 | # Step 8: query tariff data |
|
33 | # Step 9: query associated sensors and points data |
|
34 | # Step 10: construct the report |
|
35 | #################################################################################################################### |
|
36 | @staticmethod |
|
37 | def on_get(req, resp): |
|
38 | if 'API-KEY' not in req.headers or \ |
|
39 | not isinstance(req.headers['API-KEY'], str) or \ |
|
40 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
41 | access_control(req) |
|
42 | else: |
|
43 | api_key_control(req) |
|
44 | print(req.params) |
|
45 | tenant_id = req.params.get('tenantid') |
|
46 | tenant_uuid = req.params.get('tenantuuid') |
|
47 | period_type = req.params.get('periodtype') |
|
48 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
49 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
50 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
51 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
52 | language = req.params.get('language') |
|
53 | quick_mode = req.params.get('quickmode') |
|
54 | ||
55 | ################################################################################################################ |
|
56 | # Step 1: valid parameters |
|
57 | ################################################################################################################ |
|
58 | if tenant_id is None and tenant_uuid is None: |
|
59 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
60 | title='API.BAD_REQUEST', |
|
61 | description='API.INVALID_TENANT_ID') |
|
62 | ||
63 | if tenant_id is not None: |
|
64 | tenant_id = str.strip(tenant_id) |
|
65 | if not tenant_id.isdigit() or int(tenant_id) <= 0: |
|
66 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
67 | title='API.BAD_REQUEST', |
|
68 | description='API.INVALID_TENANT_ID') |
|
69 | ||
70 | if tenant_uuid is not None: |
|
71 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
72 | match = regex.match(str.strip(tenant_uuid)) |
|
73 | if not bool(match): |
|
74 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
75 | title='API.BAD_REQUEST', |
|
76 | description='API.INVALID_TENANT_UUID') |
|
77 | ||
78 | if period_type is None: |
|
79 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
80 | description='API.INVALID_PERIOD_TYPE') |
|
81 | else: |
|
82 | period_type = str.strip(period_type) |
|
83 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
84 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
85 | description='API.INVALID_PERIOD_TYPE') |
|
86 | ||
87 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
88 | if config.utc_offset[0] == '-': |
|
89 | timezone_offset = -timezone_offset |
|
90 | ||
91 | base_start_datetime_utc = None |
|
92 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
93 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
94 | try: |
|
95 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
96 | except ValueError: |
|
97 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
98 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
99 | base_start_datetime_utc = \ |
|
100 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
101 | # normalize the start datetime |
|
102 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
103 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
104 | else: |
|
105 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
106 | ||
107 | base_end_datetime_utc = None |
|
108 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
109 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
110 | try: |
|
111 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
112 | except ValueError: |
|
113 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
114 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
115 | base_end_datetime_utc = \ |
|
116 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
117 | ||
118 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
119 | base_start_datetime_utc >= base_end_datetime_utc: |
|
120 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
121 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
122 | ||
123 | if reporting_period_start_datetime_local is None: |
|
124 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
125 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
126 | else: |
|
127 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
128 | try: |
|
129 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
130 | '%Y-%m-%dT%H:%M:%S') |
|
131 | except ValueError: |
|
132 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
133 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
134 | reporting_start_datetime_utc = \ |
|
135 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
136 | # normalize the start datetime |
|
137 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
138 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
139 | else: |
|
140 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
141 | ||
142 | if reporting_period_end_datetime_local is None: |
|
143 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
144 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
145 | else: |
|
146 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
147 | try: |
|
148 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
149 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
150 | timedelta(minutes=timezone_offset) |
|
151 | except ValueError: |
|
152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
153 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
154 | ||
155 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
157 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
158 | ||
159 | # if quick mode is turned on, do not return parameters data or the excel file |
|
160 | is_quick_mode = False |
|
161 | if quick_mode is not None and \ |
|
162 | len(str.strip(quick_mode)) > 0 and \ |
|
163 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
164 | is_quick_mode = True |
|
165 | ||
166 | trans = utilities.get_translation(language) |
|
167 | trans.install() |
|
168 | _ = trans.gettext |
|
169 | ||
170 | ################################################################################################################ |
|
171 | # Step 2: query the tenant |
|
172 | ################################################################################################################ |
|
173 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
174 | cursor_system = cnx_system.cursor() |
|
175 | ||
176 | cnx_billing = mysql.connector.connect(**config.myems_billing_db) |
|
177 | cursor_billing = cnx_billing.cursor() |
|
178 | ||
179 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
180 | cursor_historical = cnx_historical.cursor() |
|
181 | ||
182 | if tenant_id is not None: |
|
183 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
184 | " FROM tbl_tenants " |
|
185 | " WHERE id = %s ", (tenant_id,)) |
|
186 | row_tenant = cursor_system.fetchone() |
|
187 | elif tenant_uuid is not None: |
|
188 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
189 | " FROM tbl_tenants " |
|
190 | " WHERE uuid = %s ", (tenant_uuid,)) |
|
191 | row_tenant = cursor_system.fetchone() |
|
192 | ||
193 | if row_tenant is None: |
|
194 | if cursor_system: |
|
195 | cursor_system.close() |
|
196 | if cnx_system: |
|
197 | cnx_system.close() |
|
198 | ||
199 | if cursor_billing: |
|
200 | cursor_billing.close() |
|
201 | if cnx_billing: |
|
202 | cnx_billing.close() |
|
203 | ||
204 | if cursor_historical: |
|
205 | cursor_historical.close() |
|
206 | if cnx_historical: |
|
207 | cnx_historical.close() |
|
208 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.TENANT_NOT_FOUND') |
|
209 | ||
210 | tenant = dict() |
|
211 | tenant['id'] = row_tenant[0] |
|
212 | tenant['name'] = row_tenant[1] |
|
213 | tenant['area'] = row_tenant[2] |
|
214 | tenant['cost_center_id'] = row_tenant[3] |
|
215 | ||
216 | ################################################################################################################ |
|
217 | # Step 3: query energy categories |
|
218 | ################################################################################################################ |
|
219 | energy_category_set = set() |
|
220 | # query energy categories in base period |
|
221 | cursor_billing.execute(" SELECT DISTINCT(energy_category_id) " |
|
222 | " FROM tbl_tenant_input_category_hourly " |
|
223 | " WHERE tenant_id = %s " |
|
224 | " AND start_datetime_utc >= %s " |
|
225 | " AND start_datetime_utc < %s ", |
|
226 | (tenant['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
227 | rows_energy_categories = cursor_billing.fetchall() |
|
228 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
229 | for row_energy_category in rows_energy_categories: |
|
230 | energy_category_set.add(row_energy_category[0]) |
|
231 | ||
232 | # query energy categories in reporting period |
|
233 | cursor_billing.execute(" SELECT DISTINCT(energy_category_id) " |
|
234 | " FROM tbl_tenant_input_category_hourly " |
|
235 | " WHERE tenant_id = %s " |
|
236 | " AND start_datetime_utc >= %s " |
|
237 | " AND start_datetime_utc < %s ", |
|
238 | (tenant['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
239 | rows_energy_categories = cursor_billing.fetchall() |
|
240 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
241 | for row_energy_category in rows_energy_categories: |
|
242 | energy_category_set.add(row_energy_category[0]) |
|
243 | ||
244 | # query all energy categories in base period and reporting period |
|
245 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
246 | " FROM tbl_energy_categories " |
|
247 | " ORDER BY id ", ) |
|
248 | rows_energy_categories = cursor_system.fetchall() |
|
249 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
250 | if cursor_system: |
|
251 | cursor_system.close() |
|
252 | if cnx_system: |
|
253 | cnx_system.close() |
|
254 | ||
255 | if cursor_billing: |
|
256 | cursor_billing.close() |
|
257 | if cnx_billing: |
|
258 | cnx_billing.close() |
|
259 | ||
260 | if cursor_historical: |
|
261 | cursor_historical.close() |
|
262 | if cnx_historical: |
|
263 | cnx_historical.close() |
|
264 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
265 | title='API.NOT_FOUND', |
|
266 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
267 | energy_category_dict = dict() |
|
268 | for row_energy_category in rows_energy_categories: |
|
269 | if row_energy_category[0] in energy_category_set: |
|
270 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
271 | "unit_of_measure": row_energy_category[2], |
|
272 | "kgce": row_energy_category[3], |
|
273 | "kgco2e": row_energy_category[4]} |
|
274 | ||
275 | ################################################################################################################ |
|
276 | # Step 4: query associated sensors |
|
277 | ################################################################################################################ |
|
278 | point_list = list() |
|
279 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
280 | " FROM tbl_tenants t, tbl_sensors s, tbl_tenants_sensors ts, " |
|
281 | " tbl_points p, tbl_sensors_points sp " |
|
282 | " WHERE t.id = %s AND t.id = ts.tenant_id AND ts.sensor_id = s.id " |
|
283 | " AND s.id = sp.sensor_id AND sp.point_id = p.id " |
|
284 | " ORDER BY p.id ", (tenant['id'],)) |
|
285 | rows_points = cursor_system.fetchall() |
|
286 | if rows_points is not None and len(rows_points) > 0: |
|
287 | for row in rows_points: |
|
288 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
289 | ||
290 | ################################################################################################################ |
|
291 | # Step 5: query associated points |
|
292 | ################################################################################################################ |
|
293 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
294 | " FROM tbl_tenants t, tbl_tenants_points tp, tbl_points p " |
|
295 | " WHERE t.id = %s AND t.id = tp.tenant_id AND tp.point_id = p.id " |
|
296 | " ORDER BY p.id ", (tenant['id'],)) |
|
297 | rows_points = cursor_system.fetchall() |
|
298 | if rows_points is not None and len(rows_points) > 0: |
|
299 | for row in rows_points: |
|
300 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
301 | ||
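Steps 4 and 5 append both the sensor-linked points and the directly associated points into the same `point_list`, so later steps treat them uniformly. A sketch with hypothetical rows in the `(id, name, units, object_type)` shape returned by those queries:

    rows_sensor_points = [(101, 'Meter Power', 'kW', 'ANALOG_VALUE')]
    rows_direct_points = [(102, 'Room Temperature', 'degC', 'ANALOG_VALUE')]

    point_list = []
    for rows in (rows_sensor_points, rows_direct_points):
        for row in rows:
            point_list.append({"id": row[0], "name": row[1],
                               "units": row[2], "object_type": row[3]})

    print(len(point_list))  # 2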
302 | ################################################################################################################ |
|
303 | # Step 6: query base period energy cost |
|
304 | ################################################################################################################ |
|
305 | base = dict() |
|
306 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
307 | for energy_category_id in energy_category_set: |
|
308 | base[energy_category_id] = dict() |
|
309 | base[energy_category_id]['timestamps'] = list() |
|
310 | base[energy_category_id]['values'] = list() |
|
311 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
312 | ||
313 | cursor_billing.execute(" SELECT start_datetime_utc, actual_value " |
|
314 | " FROM tbl_tenant_input_category_hourly " |
|
315 | " WHERE tenant_id = %s " |
|
316 | " AND energy_category_id = %s " |
|
317 | " AND start_datetime_utc >= %s " |
|
318 | " AND start_datetime_utc < %s " |
|
319 | " ORDER BY start_datetime_utc ", |
|
320 | (tenant['id'], |
|
321 | energy_category_id, |
|
322 | base_start_datetime_utc, |
|
323 | base_end_datetime_utc)) |
|
324 | rows_tenant_hourly = cursor_billing.fetchall() |
|
325 | ||
326 | rows_tenant_periodically = utilities.aggregate_hourly_data_by_period(rows_tenant_hourly, |
|
327 | base_start_datetime_utc, |
|
328 | base_end_datetime_utc, |
|
329 | period_type) |
|
330 | for row_tenant_periodically in rows_tenant_periodically: |
|
331 | current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
332 | timedelta(minutes=timezone_offset) |
|
333 | if period_type == 'hourly': |
|
334 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
335 | elif period_type == 'daily': |
|
336 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
337 | elif period_type == 'weekly': |
|
338 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
339 | elif period_type == 'monthly': |
|
340 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
341 | elif period_type == 'yearly': |
|
342 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
343 | ||
344 | actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1] |
|
345 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
346 | base[energy_category_id]['values'].append(actual_value) |
|
347 | base[energy_category_id]['subtotal'] += actual_value |
|
348 | ||
349 | ################################################################################################################ |
|
350 | # Step 7: query reporting period energy cost |
|
351 | ################################################################################################################ |
|
352 | reporting = dict() |
|
353 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
354 | for energy_category_id in energy_category_set: |
|
355 | reporting[energy_category_id] = dict() |
|
356 | reporting[energy_category_id]['timestamps'] = list() |
|
357 | reporting[energy_category_id]['values'] = list() |
|
358 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
359 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
|
360 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
|
361 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
|
362 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
|
363 | reporting[energy_category_id]['deep'] = Decimal(0.0) |
|
364 | ||
365 | cursor_billing.execute(" SELECT start_datetime_utc, actual_value " |
|
366 | " FROM tbl_tenant_input_category_hourly " |
|
367 | " WHERE tenant_id = %s " |
|
368 | " AND energy_category_id = %s " |
|
369 | " AND start_datetime_utc >= %s " |
|
370 | " AND start_datetime_utc < %s " |
|
371 | " ORDER BY start_datetime_utc ", |
|
372 | (tenant['id'], |
|
373 | energy_category_id, |
|
374 | reporting_start_datetime_utc, |
|
375 | reporting_end_datetime_utc)) |
|
376 | rows_tenant_hourly = cursor_billing.fetchall() |
|
377 | ||
378 | rows_tenant_periodically = utilities.aggregate_hourly_data_by_period(rows_tenant_hourly, |
|
379 | reporting_start_datetime_utc, |
|
380 | reporting_end_datetime_utc, |
|
381 | period_type) |
|
382 | for row_tenant_periodically in rows_tenant_periodically: |
|
383 | current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
384 | timedelta(minutes=timezone_offset) |
|
385 | if period_type == 'hourly': |
|
386 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
387 | elif period_type == 'daily': |
|
388 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
389 | elif period_type == 'weekly': |
|
390 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
391 | elif period_type == 'monthly': |
|
392 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
393 | elif period_type == 'yearly': |
|
394 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
395 | ||
396 | actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1] |
|
397 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
398 | reporting[energy_category_id]['values'].append(actual_value) |
|
399 | reporting[energy_category_id]['subtotal'] += actual_value |
|
400 | ||
401 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(tenant['cost_center_id'], |
|
402 | energy_category_id, |
|
403 | reporting_start_datetime_utc, |
|
404 | reporting_end_datetime_utc) |
|
405 | for row in rows_tenant_hourly: |
|
406 | peak_type = energy_category_tariff_dict.get(row[0], None) |
|
407 | if peak_type == 'toppeak': |
|
408 | reporting[energy_category_id]['toppeak'] += row[1] |
|
409 | elif peak_type == 'onpeak': |
|
410 | reporting[energy_category_id]['onpeak'] += row[1] |
|
411 | elif peak_type == 'midpeak': |
|
412 | reporting[energy_category_id]['midpeak'] += row[1] |
|
413 | elif peak_type == 'offpeak': |
|
414 | reporting[energy_category_id]['offpeak'] += row[1] |
|
415 | elif peak_type == 'deep': |
|
416 | reporting[energy_category_id]['deep'] += row[1] |
|
417 | ||
418 | ################################################################################################################ |
|
419 | # Step 8: query tariff data |
|
420 | ################################################################################################################ |
|
421 | parameters_data = dict() |
|
422 | parameters_data['names'] = list() |
|
423 | parameters_data['timestamps'] = list() |
|
424 | parameters_data['values'] = list() |
|
425 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
426 | and not is_quick_mode: |
|
427 | for energy_category_id in energy_category_set: |
|
428 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(tenant['cost_center_id'], |
|
429 | energy_category_id, |
|
430 | reporting_start_datetime_utc, |
|
431 | reporting_end_datetime_utc) |
|
432 | tariff_timestamp_list = list() |
|
433 | tariff_value_list = list() |
|
434 | for k, v in energy_category_tariff_dict.items(): |
|
435 | # convert k from utc to local |
|
436 | k = k + timedelta(minutes=timezone_offset) |
|
437 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
438 | tariff_value_list.append(v) |
|
439 | ||
440 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
441 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
442 | parameters_data['values'].append(tariff_value_list) |
|
443 | ||
444 | ################################################################################################################ |
|
445 | # Step 9: query associated sensors and points data |
|
446 | ################################################################################################################ |
|
447 | if not is_quick_mode: |
|
448 | for point in point_list: |
|
449 | point_values = [] |
|
450 | point_timestamps = [] |
|
451 | if point['object_type'] == 'ENERGY_VALUE': |
|
452 | query = (" SELECT utc_date_time, actual_value " |
|
453 | " FROM tbl_energy_value " |
|
454 | " WHERE point_id = %s " |
|
455 | " AND utc_date_time BETWEEN %s AND %s " |
|
456 | " ORDER BY utc_date_time ") |
|
457 | cursor_historical.execute(query, (point['id'], |
|
458 | reporting_start_datetime_utc, |
|
459 | reporting_end_datetime_utc)) |
|
460 | rows = cursor_historical.fetchall() |
|
461 | ||
462 | if rows is not None and len(rows) > 0: |
|
463 | for row in rows: |
|
464 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
465 | timedelta(minutes=timezone_offset) |
|
466 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
467 | point_timestamps.append(current_datetime) |
|
468 | point_values.append(row[1]) |
|
469 | elif point['object_type'] == 'ANALOG_VALUE': |
|
470 | query = (" SELECT utc_date_time, actual_value " |
|
471 | " FROM tbl_analog_value " |
|
472 | " WHERE point_id = %s " |
|
473 | " AND utc_date_time BETWEEN %s AND %s " |
|
474 | " ORDER BY utc_date_time ") |
|
475 | cursor_historical.execute(query, (point['id'], |
|
476 | reporting_start_datetime_utc, |
|
477 | reporting_end_datetime_utc)) |
|
478 | rows = cursor_historical.fetchall() |
|
479 | ||
480 | if rows is not None and len(rows) > 0: |
|
481 | for row in rows: |
|
482 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
483 | timedelta(minutes=timezone_offset) |
|
484 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
485 | point_timestamps.append(current_datetime) |
|
486 | point_values.append(row[1]) |
|
487 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
488 | query = (" SELECT utc_date_time, actual_value " |
|
489 | " FROM tbl_digital_value " |
|
490 | " WHERE point_id = %s " |
|
491 | " AND utc_date_time BETWEEN %s AND %s " |
|
492 | " ORDER BY utc_date_time ") |
|
493 | cursor_historical.execute(query, (point['id'], |
|
494 | reporting_start_datetime_utc, |
|
495 | reporting_end_datetime_utc)) |
|
496 | rows = cursor_historical.fetchall() |
|
497 | ||
498 | if rows is not None and len(rows) > 0: |
|
499 | for row in rows: |
|
500 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
501 | timedelta(minutes=timezone_offset) |
|
502 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
503 | point_timestamps.append(current_datetime) |
|
504 | point_values.append(row[1]) |
|
505 | ||
506 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
507 | parameters_data['timestamps'].append(point_timestamps) |
|
508 | parameters_data['values'].append(point_values) |
|
509 | ||
510 | ################################################################################################################ |
|
511 | # Step 10: construct the report |
|
512 | ################################################################################################################ |
|
513 | if cursor_system: |
|
514 | cursor_system.close() |
|
515 | if cnx_system: |
|
516 | cnx_system.close() |
|
517 | ||
518 | if cursor_billing: |
|
519 | cursor_billing.close() |
|
520 | if cnx_billing: |
|
521 | cnx_billing.close() |
|
522 | ||
523 | if cursor_historical: |
|
524 | cursor_historical.close() |
|
525 | if cnx_historical: |
|
526 | cnx_historical.close() |
|
527 | ||
528 | result = dict() |
|
529 | ||
530 | result['tenant'] = dict() |
|
531 | result['tenant']['name'] = tenant['name'] |
|
532 | result['tenant']['area'] = tenant['area'] |
|
533 | ||
534 | result['base_period'] = dict() |
|
535 | result['base_period']['names'] = list() |
|
536 | result['base_period']['units'] = list() |
|
537 | result['base_period']['timestamps'] = list() |
|
538 | result['base_period']['values'] = list() |
|
539 | result['base_period']['subtotals'] = list() |
|
540 | result['base_period']['total'] = Decimal(0.0) |
|
541 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
542 | for energy_category_id in energy_category_set: |
|
543 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
544 | result['base_period']['units'].append(config.currency_unit) |
|
545 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
546 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
547 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
548 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
|
549 | ||
550 | result['reporting_period'] = dict() |
|
551 | result['reporting_period']['names'] = list() |
|
552 | result['reporting_period']['energy_category_ids'] = list() |
|
553 | result['reporting_period']['units'] = list() |
|
554 | result['reporting_period']['timestamps'] = list() |
|
555 | result['reporting_period']['values'] = list() |
|
556 | result['reporting_period']['rates'] = list() |
|
557 | result['reporting_period']['subtotals'] = list() |
|
558 | result['reporting_period']['subtotals_per_unit_area'] = list() |
|
559 | result['reporting_period']['toppeaks'] = list() |
|
560 | result['reporting_period']['onpeaks'] = list() |
|
561 | result['reporting_period']['midpeaks'] = list() |
|
562 | result['reporting_period']['offpeaks'] = list() |
|
563 | result['reporting_period']['deeps'] = list() |
|
564 | result['reporting_period']['increment_rates'] = list() |
|
565 | result['reporting_period']['total'] = Decimal(0.0) |
|
566 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
|
567 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
|
568 | result['reporting_period']['total_unit'] = config.currency_unit |
|
569 | ||
570 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
571 | for energy_category_id in energy_category_set: |
|
572 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
573 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
574 | result['reporting_period']['units'].append(config.currency_unit) |
|
575 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
576 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
577 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
578 | result['reporting_period']['subtotals_per_unit_area'].append( |
|
579 | reporting[energy_category_id]['subtotal'] / tenant['area'] if tenant['area'] > 0.0 else None) |
|
580 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
|
581 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
|
582 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
|
583 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
|
584 | result['reporting_period']['deeps'].append(reporting[energy_category_id]['deep']) |
|
585 | result['reporting_period']['increment_rates'].append( |
|
586 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
|
587 | base[energy_category_id]['subtotal'] |
|
588 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
|
589 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
|
590 | ||
591 | rate = list() |
|
592 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
593 | if index < len(base[energy_category_id]['values']) \ |
|
594 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
595 | rate.append((value - base[energy_category_id]['values'][index]) |
|
596 | / base[energy_category_id]['values'][index]) |
|
597 | else: |
|
598 | rate.append(None) |
|
599 | result['reporting_period']['rates'].append(rate) |
|
600 | ||
601 | result['reporting_period']['total_per_unit_area'] = \ |
|
602 | result['reporting_period']['total'] / tenant['area'] if tenant['area'] > 0.0 else None |
|
603 | ||
604 | result['reporting_period']['total_increment_rate'] = \ |
|
605 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
|
606 | result['base_period']['total'] \ |
|
607 | if result['base_period']['total'] > Decimal(0.0) else None |
|
608 | ||
609 | result['parameters'] = { |
|
610 | "names": parameters_data['names'], |
|
611 | "timestamps": parameters_data['timestamps'], |
|
612 | "values": parameters_data['values'] |
|
613 | } |
|
614 | ||
615 | # export result to Excel file and then encode the file to base64 string |
|
616 | if not is_quick_mode: |
|
617 | result['excel_bytes_base64'] = excelexporters.tenantcost.export(result, |
|
618 | tenant['name'], |
|
619 | base_period_start_datetime_local, |
|
620 | base_period_end_datetime_local, |
|
621 | reporting_period_start_datetime_local, |
|
622 | reporting_period_end_datetime_local, |
|
623 | period_type, |
|
624 | language) |
|
625 | ||
626 | resp.text = json.dumps(result) |
|
627 |
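Both the base-period loop (lines 330-347) and the reporting-period loop (lines 382-399) above repeat the same UTC-to-local shift and the same five-branch truncation of the ISO timestamp for each period_type. A minimal sketch of a shared helper is shown below; format_local_timestamp and _SLICE_BY_PERIOD are hypothetical names, not part of the quoted MyEMS source.

    from datetime import timedelta, timezone

    # Hypothetical helper (not in the quoted code): one place for the
    # UTC-to-local shift and the period_type-dependent label length
    # that the aggregation loops above repeat for every row.
    _SLICE_BY_PERIOD = {
        'hourly': 19,   # YYYY-MM-DDTHH:MM:SS
        'daily': 10,    # YYYY-MM-DD
        'weekly': 10,   # YYYY-MM-DD
        'monthly': 7,   # YYYY-MM
        'yearly': 4,    # YYYY
    }

    def format_local_timestamp(utc_dt, timezone_offset, period_type):
        """Return the local-time label used for one aggregated row."""
        local_dt = utc_dt.replace(tzinfo=timezone.utc) + timedelta(minutes=timezone_offset)
        return local_dt.isoformat()[0:_SLICE_BY_PERIOD[period_type]]

With such a helper, each if/elif chain over period_type collapses to a single call per aggregated row.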
@@ 13-624 (lines=612) @@ | ||
10 | from core.useractivity import access_control, api_key_control |
|
11 | ||
12 | ||
13 | class Reporting: |
|
14 | def __init__(self): |
|
15 | """"Initializes Reporting""" |
|
16 | pass |
|
17 | ||
18 | @staticmethod |
|
19 | def on_options(req, resp): |
|
20 | _ = req |
|
21 | resp.status = falcon.HTTP_200 |
|
22 | ||
23 | #################################################################################################################### |
|
24 | # PROCEDURES |
|
25 |     # Step 1: validate parameters |
|
26 | # Step 2: query the store |
|
27 | # Step 3: query energy categories |
|
28 | # Step 4: query associated sensors |
|
29 | # Step 5: query associated points |
|
30 | # Step 6: query base period energy cost |
|
31 | # Step 7: query reporting period energy cost |
|
32 | # Step 8: query tariff data |
|
33 | # Step 9: query associated sensors and points data |
|
34 | # Step 10: construct the report |
|
35 | #################################################################################################################### |
|
36 | @staticmethod |
|
37 | def on_get(req, resp): |
|
38 | if 'API-KEY' not in req.headers or \ |
|
39 | not isinstance(req.headers['API-KEY'], str) or \ |
|
40 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
41 | access_control(req) |
|
42 | else: |
|
43 | api_key_control(req) |
|
44 | print(req.params) |
|
45 | store_id = req.params.get('storeid') |
|
46 | store_uuid = req.params.get('storeuuid') |
|
47 | period_type = req.params.get('periodtype') |
|
48 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
49 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
50 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
51 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
52 | language = req.params.get('language') |
|
53 | quick_mode = req.params.get('quickmode') |
|
54 | ||
55 | ################################################################################################################ |
|
56 |         # Step 1: validate parameters |
|
57 | ################################################################################################################ |
|
58 | if store_id is None and store_uuid is None: |
|
59 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
60 | title='API.BAD_REQUEST', |
|
61 | description='API.INVALID_STORE_ID') |
|
62 | ||
63 | if store_id is not None: |
|
64 | store_id = str.strip(store_id) |
|
65 | if not store_id.isdigit() or int(store_id) <= 0: |
|
66 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
67 | title='API.BAD_REQUEST', |
|
68 | description='API.INVALID_STORE_ID') |
|
69 | ||
70 | if store_uuid is not None: |
|
71 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
72 | match = regex.match(str.strip(store_uuid)) |
|
73 | if not bool(match): |
|
74 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
75 | title='API.BAD_REQUEST', |
|
76 | description='API.INVALID_STORE_UUID') |
|
77 | ||
78 | if period_type is None: |
|
79 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
80 | description='API.INVALID_PERIOD_TYPE') |
|
81 | else: |
|
82 | period_type = str.strip(period_type) |
|
83 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
84 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
85 | description='API.INVALID_PERIOD_TYPE') |
|
86 | ||
87 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
88 | if config.utc_offset[0] == '-': |
|
89 | timezone_offset = -timezone_offset |
|
90 | ||
91 | base_start_datetime_utc = None |
|
92 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
93 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
94 | try: |
|
95 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
96 | except ValueError: |
|
97 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
98 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
99 | base_start_datetime_utc = \ |
|
100 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
101 |             # normalize the start datetime |
|
102 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
103 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
104 | else: |
|
105 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
106 | ||
107 | base_end_datetime_utc = None |
|
108 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
109 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
110 | try: |
|
111 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
112 | except ValueError: |
|
113 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
114 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
115 | base_end_datetime_utc = \ |
|
116 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
117 | ||
118 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
119 | base_start_datetime_utc >= base_end_datetime_utc: |
|
120 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
121 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
122 | ||
123 | if reporting_period_start_datetime_local is None: |
|
124 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
125 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
126 | else: |
|
127 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
128 | try: |
|
129 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
130 | '%Y-%m-%dT%H:%M:%S') |
|
131 | except ValueError: |
|
132 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
133 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
134 | reporting_start_datetime_utc = \ |
|
135 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
136 | # nomalize the start datetime |
|
137 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
138 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
139 | else: |
|
140 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
141 | ||
142 | if reporting_period_end_datetime_local is None: |
|
143 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
144 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
145 | else: |
|
146 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
147 | try: |
|
148 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
149 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
150 | timedelta(minutes=timezone_offset) |
|
151 | except ValueError: |
|
152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
153 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
154 | ||
155 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
157 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
158 | ||
159 |         # if quick mode is turned on, do not return parameters data or the Excel file |
|
160 | is_quick_mode = False |
|
161 | if quick_mode is not None and \ |
|
162 | len(str.strip(quick_mode)) > 0 and \ |
|
163 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
164 | is_quick_mode = True |
|
165 | ||
166 | trans = utilities.get_translation(language) |
|
167 | trans.install() |
|
168 | _ = trans.gettext |
|
169 | ||
170 | ################################################################################################################ |
|
171 | # Step 2: query the store |
|
172 | ################################################################################################################ |
|
173 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
174 | cursor_system = cnx_system.cursor() |
|
175 | ||
176 | cnx_billing = mysql.connector.connect(**config.myems_billing_db) |
|
177 | cursor_billing = cnx_billing.cursor() |
|
178 | ||
179 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
180 | cursor_historical = cnx_historical.cursor() |
|
181 | ||
182 | if store_id is not None: |
|
183 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
184 | " FROM tbl_stores " |
|
185 | " WHERE id = %s ", (store_id,)) |
|
186 | row_store = cursor_system.fetchone() |
|
187 | elif store_uuid is not None: |
|
188 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
189 | " FROM tbl_stores " |
|
190 | " WHERE uuid = %s ", (store_uuid,)) |
|
191 | row_store = cursor_system.fetchone() |
|
192 | ||
193 | if row_store is None: |
|
194 | if cursor_system: |
|
195 | cursor_system.close() |
|
196 | if cnx_system: |
|
197 | cnx_system.close() |
|
198 | ||
199 | if cursor_billing: |
|
200 | cursor_billing.close() |
|
201 | if cnx_billing: |
|
202 | cnx_billing.close() |
|
203 | ||
204 | if cursor_historical: |
|
205 | cursor_historical.close() |
|
206 | if cnx_historical: |
|
207 | cnx_historical.close() |
|
208 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.STORE_NOT_FOUND') |
|
209 | ||
210 | store = dict() |
|
211 | store['id'] = row_store[0] |
|
212 | store['name'] = row_store[1] |
|
213 | store['area'] = row_store[2] |
|
214 | store['cost_center_id'] = row_store[3] |
|
215 | ||
216 | ################################################################################################################ |
|
217 | # Step 3: query energy categories |
|
218 | ################################################################################################################ |
|
219 | energy_category_set = set() |
|
220 | # query energy categories in base period |
|
221 | cursor_billing.execute(" SELECT DISTINCT(energy_category_id) " |
|
222 | " FROM tbl_store_input_category_hourly " |
|
223 | " WHERE store_id = %s " |
|
224 | " AND start_datetime_utc >= %s " |
|
225 | " AND start_datetime_utc < %s ", |
|
226 | (store['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
227 | rows_energy_categories = cursor_billing.fetchall() |
|
228 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
229 | for row_energy_category in rows_energy_categories: |
|
230 | energy_category_set.add(row_energy_category[0]) |
|
231 | ||
232 | # query energy categories in reporting period |
|
233 | cursor_billing.execute(" SELECT DISTINCT(energy_category_id) " |
|
234 | " FROM tbl_store_input_category_hourly " |
|
235 | " WHERE store_id = %s " |
|
236 | " AND start_datetime_utc >= %s " |
|
237 | " AND start_datetime_utc < %s ", |
|
238 | (store['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
239 | rows_energy_categories = cursor_billing.fetchall() |
|
240 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
241 | for row_energy_category in rows_energy_categories: |
|
242 | energy_category_set.add(row_energy_category[0]) |
|
243 | ||
244 | # query all energy categories in base period and reporting period |
|
245 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
246 | " FROM tbl_energy_categories " |
|
247 | " ORDER BY id ", ) |
|
248 | rows_energy_categories = cursor_system.fetchall() |
|
249 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
250 | if cursor_system: |
|
251 | cursor_system.close() |
|
252 | if cnx_system: |
|
253 | cnx_system.close() |
|
254 | ||
255 | if cursor_billing: |
|
256 | cursor_billing.close() |
|
257 | if cnx_billing: |
|
258 | cnx_billing.close() |
|
259 | ||
260 | if cursor_historical: |
|
261 | cursor_historical.close() |
|
262 | if cnx_historical: |
|
263 | cnx_historical.close() |
|
264 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
265 | title='API.NOT_FOUND', |
|
266 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
267 | energy_category_dict = dict() |
|
268 | for row_energy_category in rows_energy_categories: |
|
269 | if row_energy_category[0] in energy_category_set: |
|
270 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
271 | "unit_of_measure": row_energy_category[2], |
|
272 | "kgce": row_energy_category[3], |
|
273 | "kgco2e": row_energy_category[4]} |
|
274 | ||
275 | ################################################################################################################ |
|
276 | # Step 4: query associated sensors |
|
277 | ################################################################################################################ |
|
278 | point_list = list() |
|
279 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
280 | " FROM tbl_stores st, tbl_sensors se, tbl_stores_sensors ss, " |
|
281 | " tbl_points p, tbl_sensors_points sp " |
|
282 | " WHERE st.id = %s AND st.id = ss.store_id AND ss.sensor_id = se.id " |
|
283 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
|
284 | " ORDER BY p.id ", (store['id'],)) |
|
285 | rows_points = cursor_system.fetchall() |
|
286 | if rows_points is not None and len(rows_points) > 0: |
|
287 | for row in rows_points: |
|
288 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
289 | ||
290 | ################################################################################################################ |
|
291 | # Step 5: query associated points |
|
292 | ################################################################################################################ |
|
293 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
294 | " FROM tbl_stores s, tbl_stores_points sp, tbl_points p " |
|
295 | " WHERE s.id = %s AND s.id = sp.store_id AND sp.point_id = p.id " |
|
296 | " ORDER BY p.id ", (store['id'],)) |
|
297 | rows_points = cursor_system.fetchall() |
|
298 | if rows_points is not None and len(rows_points) > 0: |
|
299 | for row in rows_points: |
|
300 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
301 | ||
302 | ################################################################################################################ |
|
303 | # Step 6: query base period energy cost |
|
304 | ################################################################################################################ |
|
305 | base = dict() |
|
306 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
307 | for energy_category_id in energy_category_set: |
|
308 | base[energy_category_id] = dict() |
|
309 | base[energy_category_id]['timestamps'] = list() |
|
310 | base[energy_category_id]['values'] = list() |
|
311 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
312 | ||
313 | cursor_billing.execute(" SELECT start_datetime_utc, actual_value " |
|
314 | " FROM tbl_store_input_category_hourly " |
|
315 | " WHERE store_id = %s " |
|
316 | " AND energy_category_id = %s " |
|
317 | " AND start_datetime_utc >= %s " |
|
318 | " AND start_datetime_utc < %s " |
|
319 | " ORDER BY start_datetime_utc ", |
|
320 | (store['id'], |
|
321 | energy_category_id, |
|
322 | base_start_datetime_utc, |
|
323 | base_end_datetime_utc)) |
|
324 | rows_store_hourly = cursor_billing.fetchall() |
|
325 | ||
326 | rows_store_periodically = utilities.aggregate_hourly_data_by_period(rows_store_hourly, |
|
327 | base_start_datetime_utc, |
|
328 | base_end_datetime_utc, |
|
329 | period_type) |
|
330 | for row_store_periodically in rows_store_periodically: |
|
331 | current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
332 | timedelta(minutes=timezone_offset) |
|
333 | if period_type == 'hourly': |
|
334 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
335 | elif period_type == 'daily': |
|
336 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
337 | elif period_type == 'weekly': |
|
338 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
339 | elif period_type == 'monthly': |
|
340 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
341 | elif period_type == 'yearly': |
|
342 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
343 | ||
344 | actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1] |
|
345 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
346 | base[energy_category_id]['values'].append(actual_value) |
|
347 | base[energy_category_id]['subtotal'] += actual_value |
|
348 | ||
349 | ################################################################################################################ |
|
350 | # Step 7: query reporting period energy cost |
|
351 | ################################################################################################################ |
|
352 | reporting = dict() |
|
353 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
354 | for energy_category_id in energy_category_set: |
|
355 | reporting[energy_category_id] = dict() |
|
356 | reporting[energy_category_id]['timestamps'] = list() |
|
357 | reporting[energy_category_id]['values'] = list() |
|
358 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
359 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
|
360 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
|
361 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
|
362 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
|
363 | reporting[energy_category_id]['deep'] = Decimal(0.0) |
|
364 | ||
365 | cursor_billing.execute(" SELECT start_datetime_utc, actual_value " |
|
366 | " FROM tbl_store_input_category_hourly " |
|
367 | " WHERE store_id = %s " |
|
368 | " AND energy_category_id = %s " |
|
369 | " AND start_datetime_utc >= %s " |
|
370 | " AND start_datetime_utc < %s " |
|
371 | " ORDER BY start_datetime_utc ", |
|
372 | (store['id'], |
|
373 | energy_category_id, |
|
374 | reporting_start_datetime_utc, |
|
375 | reporting_end_datetime_utc)) |
|
376 | rows_store_hourly = cursor_billing.fetchall() |
|
377 | ||
378 | rows_store_periodically = utilities.aggregate_hourly_data_by_period(rows_store_hourly, |
|
379 | reporting_start_datetime_utc, |
|
380 | reporting_end_datetime_utc, |
|
381 | period_type) |
|
382 | for row_store_periodically in rows_store_periodically: |
|
383 | current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
384 | timedelta(minutes=timezone_offset) |
|
385 | if period_type == 'hourly': |
|
386 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
387 | elif period_type == 'daily': |
|
388 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
389 | elif period_type == 'weekly': |
|
390 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
391 | elif period_type == 'monthly': |
|
392 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
393 | elif period_type == 'yearly': |
|
394 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
395 | ||
396 | actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1] |
|
397 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
398 | reporting[energy_category_id]['values'].append(actual_value) |
|
399 | reporting[energy_category_id]['subtotal'] += actual_value |
|
400 | ||
401 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(store['cost_center_id'], |
|
402 | energy_category_id, |
|
403 | reporting_start_datetime_utc, |
|
404 | reporting_end_datetime_utc) |
|
405 | for row in rows_store_hourly: |
|
406 | peak_type = energy_category_tariff_dict.get(row[0], None) |
|
407 | if peak_type == 'toppeak': |
|
408 | reporting[energy_category_id]['toppeak'] += row[1] |
|
409 | elif peak_type == 'onpeak': |
|
410 | reporting[energy_category_id]['onpeak'] += row[1] |
|
411 | elif peak_type == 'midpeak': |
|
412 | reporting[energy_category_id]['midpeak'] += row[1] |
|
413 | elif peak_type == 'offpeak': |
|
414 | reporting[energy_category_id]['offpeak'] += row[1] |
|
415 | elif peak_type == 'deep': |
|
416 | reporting[energy_category_id]['deep'] += row[1] |
|
417 | ||
418 | ################################################################################################################ |
|
419 | # Step 8: query tariff data |
|
420 | ################################################################################################################ |
|
421 | parameters_data = dict() |
|
422 | parameters_data['names'] = list() |
|
423 | parameters_data['timestamps'] = list() |
|
424 | parameters_data['values'] = list() |
|
425 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
426 | and not is_quick_mode: |
|
427 | for energy_category_id in energy_category_set: |
|
428 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(store['cost_center_id'], |
|
429 | energy_category_id, |
|
430 | reporting_start_datetime_utc, |
|
431 | reporting_end_datetime_utc) |
|
432 | tariff_timestamp_list = list() |
|
433 | tariff_value_list = list() |
|
434 | for k, v in energy_category_tariff_dict.items(): |
|
435 | # convert k from utc to local |
|
436 | k = k + timedelta(minutes=timezone_offset) |
|
437 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
438 | tariff_value_list.append(v) |
|
439 | ||
440 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
441 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
442 | parameters_data['values'].append(tariff_value_list) |
|
443 | ||
444 | ################################################################################################################ |
|
445 | # Step 9: query associated sensors and points data |
|
446 | ################################################################################################################ |
|
447 | if not is_quick_mode: |
|
448 | for point in point_list: |
|
449 | point_values = [] |
|
450 | point_timestamps = [] |
|
451 | if point['object_type'] == 'ENERGY_VALUE': |
|
452 | query = (" SELECT utc_date_time, actual_value " |
|
453 | " FROM tbl_energy_value " |
|
454 | " WHERE point_id = %s " |
|
455 | " AND utc_date_time BETWEEN %s AND %s " |
|
456 | " ORDER BY utc_date_time ") |
|
457 | cursor_historical.execute(query, (point['id'], |
|
458 | reporting_start_datetime_utc, |
|
459 | reporting_end_datetime_utc)) |
|
460 | rows = cursor_historical.fetchall() |
|
461 | ||
462 | if rows is not None and len(rows) > 0: |
|
463 | for row in rows: |
|
464 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
465 | timedelta(minutes=timezone_offset) |
|
466 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
467 | point_timestamps.append(current_datetime) |
|
468 | point_values.append(row[1]) |
|
469 | elif point['object_type'] == 'ANALOG_VALUE': |
|
470 | query = (" SELECT utc_date_time, actual_value " |
|
471 | " FROM tbl_analog_value " |
|
472 | " WHERE point_id = %s " |
|
473 | " AND utc_date_time BETWEEN %s AND %s " |
|
474 | " ORDER BY utc_date_time ") |
|
475 | cursor_historical.execute(query, (point['id'], |
|
476 | reporting_start_datetime_utc, |
|
477 | reporting_end_datetime_utc)) |
|
478 | rows = cursor_historical.fetchall() |
|
479 | ||
480 | if rows is not None and len(rows) > 0: |
|
481 | for row in rows: |
|
482 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
483 | timedelta(minutes=timezone_offset) |
|
484 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
485 | point_timestamps.append(current_datetime) |
|
486 | point_values.append(row[1]) |
|
487 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
488 | query = (" SELECT utc_date_time, actual_value " |
|
489 | " FROM tbl_digital_value " |
|
490 | " WHERE point_id = %s " |
|
491 | " AND utc_date_time BETWEEN %s AND %s " |
|
492 | " ORDER BY utc_date_time ") |
|
493 | cursor_historical.execute(query, (point['id'], |
|
494 | reporting_start_datetime_utc, |
|
495 | reporting_end_datetime_utc)) |
|
496 | rows = cursor_historical.fetchall() |
|
497 | ||
498 | if rows is not None and len(rows) > 0: |
|
499 | for row in rows: |
|
500 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
501 | timedelta(minutes=timezone_offset) |
|
502 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
503 | point_timestamps.append(current_datetime) |
|
504 | point_values.append(row[1]) |
|
505 | ||
506 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
507 | parameters_data['timestamps'].append(point_timestamps) |
|
508 | parameters_data['values'].append(point_values) |
|
509 | ||
510 | ################################################################################################################ |
|
511 | # Step 10: construct the report |
|
512 | ################################################################################################################ |
|
513 | if cursor_system: |
|
514 | cursor_system.close() |
|
515 | if cnx_system: |
|
516 | cnx_system.close() |
|
517 | ||
518 | if cursor_billing: |
|
519 | cursor_billing.close() |
|
520 | if cnx_billing: |
|
521 | cnx_billing.close() |
|
522 | ||
523 | if cursor_historical: |
|
524 | cursor_historical.close() |
|
525 | if cnx_historical: |
|
526 | cnx_historical.close() |
|
527 | ||
528 | result = dict() |
|
529 | ||
530 | result['store'] = dict() |
|
531 | result['store']['name'] = store['name'] |
|
532 | result['store']['area'] = store['area'] |
|
533 | ||
534 | result['base_period'] = dict() |
|
535 | result['base_period']['names'] = list() |
|
536 | result['base_period']['units'] = list() |
|
537 | result['base_period']['timestamps'] = list() |
|
538 | result['base_period']['values'] = list() |
|
539 | result['base_period']['subtotals'] = list() |
|
540 | result['base_period']['total'] = Decimal(0.0) |
|
541 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
542 | for energy_category_id in energy_category_set: |
|
543 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
544 | result['base_period']['units'].append(config.currency_unit) |
|
545 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
546 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
547 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
548 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
|
549 | ||
550 | result['reporting_period'] = dict() |
|
551 | result['reporting_period']['names'] = list() |
|
552 | result['reporting_period']['energy_category_ids'] = list() |
|
553 | result['reporting_period']['units'] = list() |
|
554 | result['reporting_period']['timestamps'] = list() |
|
555 | result['reporting_period']['values'] = list() |
|
556 | result['reporting_period']['rates'] = list() |
|
557 | result['reporting_period']['subtotals'] = list() |
|
558 | result['reporting_period']['subtotals_per_unit_area'] = list() |
|
559 | result['reporting_period']['toppeaks'] = list() |
|
560 | result['reporting_period']['onpeaks'] = list() |
|
561 | result['reporting_period']['midpeaks'] = list() |
|
562 | result['reporting_period']['offpeaks'] = list() |
|
563 | result['reporting_period']['deeps'] = list() |
|
564 | result['reporting_period']['increment_rates'] = list() |
|
565 | result['reporting_period']['total'] = Decimal(0.0) |
|
566 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
|
567 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
|
568 | result['reporting_period']['total_unit'] = config.currency_unit |
|
569 | ||
570 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
571 | for energy_category_id in energy_category_set: |
|
572 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
573 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
574 | result['reporting_period']['units'].append(config.currency_unit) |
|
575 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
576 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
577 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
578 | result['reporting_period']['subtotals_per_unit_area'].append( |
|
579 | reporting[energy_category_id]['subtotal'] / store['area'] if store['area'] > 0.0 else None) |
|
580 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
|
581 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
|
582 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
|
583 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
|
584 | result['reporting_period']['deeps'].append(reporting[energy_category_id]['deep']) |
|
585 | result['reporting_period']['increment_rates'].append( |
|
586 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
|
587 | base[energy_category_id]['subtotal'] |
|
588 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
|
589 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
|
590 | ||
591 | rate = list() |
|
592 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
593 | if index < len(base[energy_category_id]['values']) \ |
|
594 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
595 | rate.append((value - base[energy_category_id]['values'][index]) |
|
596 | / base[energy_category_id]['values'][index]) |
|
597 | else: |
|
598 | rate.append(None) |
|
599 | result['reporting_period']['rates'].append(rate) |
|
600 | ||
601 | result['reporting_period']['total_per_unit_area'] = \ |
|
602 | result['reporting_period']['total'] / store['area'] if store['area'] > 0.0 else None |
|
603 | ||
604 | result['reporting_period']['total_increment_rate'] = \ |
|
605 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
|
606 | result['base_period']['total'] \ |
|
607 | if result['base_period']['total'] > Decimal(0.0) else None |
|
608 | ||
609 | result['parameters'] = { |
|
610 | "names": parameters_data['names'], |
|
611 | "timestamps": parameters_data['timestamps'], |
|
612 | "values": parameters_data['values'] |
|
613 | } |
|
614 | # export result to Excel file and then encode the file to base64 string |
|
615 | if not is_quick_mode: |
|
616 | result['excel_bytes_base64'] = excelexporters.storecost.export(result, |
|
617 | store['name'], |
|
618 | base_period_start_datetime_local, |
|
619 | base_period_end_datetime_local, |
|
620 | reporting_period_start_datetime_local, |
|
621 | reporting_period_end_datetime_local, |
|
622 | period_type, |
|
623 | language) |
|
624 | resp.text = json.dumps(result) |
|
625 |
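The tenant block and the store block in this report differ only in the owning entity: the *_input_category_hourly table, its id column, the result key ('tenant' vs 'store') and the Excel exporter that is called. A parameterized query helper along the following lines could serve both; query_hourly_values is a hypothetical name and this is only a sketch, assuming the table and column identifiers come from a fixed whitelist rather than from request input.

    # Hypothetical refactoring sketch (not part of the quoted modules).
    def query_hourly_values(cursor, hourly_table, id_column, owner_id,
                            energy_category_id, start_utc, end_utc):
        """Fetch (start_datetime_utc, actual_value) rows for one owner."""
        # hourly_table and id_column must come from a fixed whitelist,
        # never from user input, because identifiers cannot be bound as %s.
        cursor.execute(" SELECT start_datetime_utc, actual_value "
                       " FROM " + hourly_table +
                       " WHERE " + id_column + " = %s "
                       "   AND energy_category_id = %s "
                       "   AND start_datetime_utc >= %s "
                       "   AND start_datetime_utc < %s "
                       " ORDER BY start_datetime_utc ",
                       (owner_id, energy_category_id, start_utc, end_utc))
        return cursor.fetchall()

    # For example, the store cost report's base-period query would become:
    # rows_store_hourly = query_hourly_values(cursor_billing,
    #                                         'tbl_store_input_category_hourly',
    #                                         'store_id', store['id'],
    #                                         energy_category_id,
    #                                         base_start_datetime_utc,
    #                                         base_end_datetime_utc)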
@@ 13-624 (lines=612) @@ | ||
10 | from core.useractivity import access_control, api_key_control |
|
11 | ||
12 | ||
13 | class Reporting: |
|
14 | def __init__(self): |
|
15 | """"Initializes Reporting""" |
|
16 | pass |
|
17 | ||
18 | @staticmethod |
|
19 | def on_options(req, resp): |
|
20 | _ = req |
|
21 | resp.status = falcon.HTTP_200 |
|
22 | ||
23 | #################################################################################################################### |
|
24 | # PROCEDURES |
|
25 |     # Step 1: validate parameters |
|
26 | # Step 2: query the store |
|
27 | # Step 3: query energy categories |
|
28 | # Step 4: query associated sensors |
|
29 | # Step 5: query associated points |
|
30 | # Step 6: query base period energy carbon dioxide emissions |
|
31 | # Step 7: query reporting period energy carbon dioxide emissions |
|
32 | # Step 8: query tariff data |
|
33 | # Step 9: query associated sensors and points data |
|
34 | # Step 10: construct the report |
|
35 | #################################################################################################################### |
|
36 | @staticmethod |
|
37 | def on_get(req, resp): |
|
38 | if 'API-KEY' not in req.headers or \ |
|
39 | not isinstance(req.headers['API-KEY'], str) or \ |
|
40 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
41 | access_control(req) |
|
42 | else: |
|
43 | api_key_control(req) |
|
44 | print(req.params) |
|
45 | store_id = req.params.get('storeid') |
|
46 | store_uuid = req.params.get('storeuuid') |
|
47 | period_type = req.params.get('periodtype') |
|
48 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
49 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
50 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
51 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
52 | language = req.params.get('language') |
|
53 | quick_mode = req.params.get('quickmode') |
|
54 | ||
55 | ################################################################################################################ |
|
56 |         # Step 1: validate parameters |
|
57 | ################################################################################################################ |
|
58 | if store_id is None and store_uuid is None: |
|
59 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
60 | title='API.BAD_REQUEST', |
|
61 | description='API.INVALID_STORE_ID') |
|
62 | ||
63 | if store_id is not None: |
|
64 | store_id = str.strip(store_id) |
|
65 | if not store_id.isdigit() or int(store_id) <= 0: |
|
66 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
67 | title='API.BAD_REQUEST', |
|
68 | description='API.INVALID_STORE_ID') |
|
69 | ||
70 | if store_uuid is not None: |
|
71 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
72 | match = regex.match(str.strip(store_uuid)) |
|
73 | if not bool(match): |
|
74 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
75 | title='API.BAD_REQUEST', |
|
76 | description='API.INVALID_STORE_UUID') |
|
77 | ||
78 | if period_type is None: |
|
79 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
80 | description='API.INVALID_PERIOD_TYPE') |
|
81 | else: |
|
82 | period_type = str.strip(period_type) |
|
83 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
84 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
85 | description='API.INVALID_PERIOD_TYPE') |
|
86 | ||
87 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
88 | if config.utc_offset[0] == '-': |
|
89 | timezone_offset = -timezone_offset |
|
90 | ||
91 | base_start_datetime_utc = None |
|
92 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
93 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
94 | try: |
|
95 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
96 | except ValueError: |
|
97 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
98 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
99 | base_start_datetime_utc = \ |
|
100 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
101 |             # normalize the start datetime |
|
102 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
103 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
104 | else: |
|
105 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
106 | ||
107 | base_end_datetime_utc = None |
|
108 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
109 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
110 | try: |
|
111 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
112 | except ValueError: |
|
113 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
114 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
115 | base_end_datetime_utc = \ |
|
116 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
117 | ||
118 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
119 | base_start_datetime_utc >= base_end_datetime_utc: |
|
120 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
121 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
122 | ||
123 | if reporting_period_start_datetime_local is None: |
|
124 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
125 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
126 | else: |
|
127 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
128 | try: |
|
129 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
130 | '%Y-%m-%dT%H:%M:%S') |
|
131 | except ValueError: |
|
132 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
133 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
134 | reporting_start_datetime_utc = \ |
|
135 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
136 | # nomalize the start datetime |
|
137 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
138 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
139 | else: |
|
140 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
141 | ||
142 | if reporting_period_end_datetime_local is None: |
|
143 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
144 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
145 | else: |
|
146 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
147 | try: |
|
148 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
149 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
150 | timedelta(minutes=timezone_offset) |
|
151 | except ValueError: |
|
152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
153 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
154 | ||
155 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
157 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
158 | ||
159 |         # if quick mode is turned on, do not return parameters data or the Excel file |
|
160 | is_quick_mode = False |
|
161 | if quick_mode is not None and \ |
|
162 | len(str.strip(quick_mode)) > 0 and \ |
|
163 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
164 | is_quick_mode = True |
|
165 | ||
166 | trans = utilities.get_translation(language) |
|
167 | trans.install() |
|
168 | _ = trans.gettext |
|
169 | ||
170 | ################################################################################################################ |
|
171 | # Step 2: query the store |
|
172 | ################################################################################################################ |
|
173 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
174 | cursor_system = cnx_system.cursor() |
|
175 | ||
176 | cnx_carbon = mysql.connector.connect(**config.myems_carbon_db) |
|
177 | cursor_carbon = cnx_carbon.cursor() |
|
178 | ||
179 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
180 | cursor_historical = cnx_historical.cursor() |
|
181 | ||
182 | if store_id is not None: |
|
183 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
184 | " FROM tbl_stores " |
|
185 | " WHERE id = %s ", (store_id,)) |
|
186 | row_store = cursor_system.fetchone() |
|
187 | elif store_uuid is not None: |
|
188 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
189 | " FROM tbl_stores " |
|
190 | " WHERE uuid = %s ", (store_uuid,)) |
|
191 | row_store = cursor_system.fetchone() |
|
192 | ||
193 | if row_store is None: |
|
194 | if cursor_system: |
|
195 | cursor_system.close() |
|
196 | if cnx_system: |
|
197 | cnx_system.close() |
|
198 | ||
199 | if cursor_carbon: |
|
200 | cursor_carbon.close() |
|
201 | if cnx_carbon: |
|
202 | cnx_carbon.close() |
|
203 | ||
204 | if cursor_historical: |
|
205 | cursor_historical.close() |
|
206 | if cnx_historical: |
|
207 | cnx_historical.close() |
|
208 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.STORE_NOT_FOUND') |
|
209 | ||
210 | store = dict() |
|
211 | store['id'] = row_store[0] |
|
212 | store['name'] = row_store[1] |
|
213 | store['area'] = row_store[2] |
|
214 | store['cost_center_id'] = row_store[3] |
|
215 | ||
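# Design note (sketch only): the handler opens three connections and closes every
# cursor/connection by hand on each early return. An equivalent pattern, assuming the
# mysql.connector objects expose close(), is contextlib.closing, which releases the
# resources even when falcon.HTTPError is raised. fetch_store is a hypothetical helper.
from contextlib import closing

import mysql.connector

def fetch_store(store_id, db_config):
    with closing(mysql.connector.connect(**db_config)) as cnx, closing(cnx.cursor()) as cursor:
        cursor.execute(" SELECT id, name, area, cost_center_id "
                       " FROM tbl_stores "
                       " WHERE id = %s ", (store_id,))
        return cursor.fetchone()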
216 | ################################################################################################################ |
|
217 | # Step 3: query energy categories |
|
218 | ################################################################################################################ |
|
219 | energy_category_set = set() |
|
220 | # query energy categories in base period |
|
221 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
|
222 | " FROM tbl_store_input_category_hourly " |
|
223 | " WHERE store_id = %s " |
|
224 | " AND start_datetime_utc >= %s " |
|
225 | " AND start_datetime_utc < %s ", |
|
226 | (store['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
227 | rows_energy_categories = cursor_carbon.fetchall() |
|
228 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
229 | for row_energy_category in rows_energy_categories: |
|
230 | energy_category_set.add(row_energy_category[0]) |
|
231 | ||
232 | # query energy categories in reporting period |
|
233 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
|
234 | " FROM tbl_store_input_category_hourly " |
|
235 | " WHERE store_id = %s " |
|
236 | " AND start_datetime_utc >= %s " |
|
237 | " AND start_datetime_utc < %s ", |
|
238 | (store['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
239 | rows_energy_categories = cursor_carbon.fetchall() |
|
240 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
241 | for row_energy_category in rows_energy_categories: |
|
242 | energy_category_set.add(row_energy_category[0]) |
|
243 | ||
244 | # query all energy categories in base period and reporting period |
|
245 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
246 | " FROM tbl_energy_categories " |
|
247 | " ORDER BY id ", ) |
|
248 | rows_energy_categories = cursor_system.fetchall() |
|
249 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
250 | if cursor_system: |
|
251 | cursor_system.close() |
|
252 | if cnx_system: |
|
253 | cnx_system.close() |
|
254 | ||
255 | if cursor_carbon: |
|
256 | cursor_carbon.close() |
|
257 | if cnx_carbon: |
|
258 | cnx_carbon.close() |
|
259 | ||
260 | if cursor_historical: |
|
261 | cursor_historical.close() |
|
262 | if cnx_historical: |
|
263 | cnx_historical.close() |
|
264 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
265 | title='API.NOT_FOUND', |
|
266 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
267 | energy_category_dict = dict() |
|
268 | for row_energy_category in rows_energy_categories: |
|
269 | if row_energy_category[0] in energy_category_set: |
|
270 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
271 | "unit_of_measure": row_energy_category[2], |
|
272 | "kgce": row_energy_category[3], |
|
273 | "kgco2e": row_energy_category[4]} |
|
274 | ||
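# Illustrative shape of the lookup built above; the ids, names and emission factors are
# example values, not real data.
example_energy_category_dict = {
    1: {"name": "Electricity", "unit_of_measure": "kWh", "kgce": 0.1229, "kgco2e": 0.5839},
    2: {"name": "Water", "unit_of_measure": "m3", "kgce": 0.0857, "kgco2e": 0.168},
}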
275 | ################################################################################################################ |
|
276 | # Step 4: query associated sensors |
|
277 | ################################################################################################################ |
|
278 | point_list = list() |
|
279 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
280 | " FROM tbl_stores st, tbl_sensors se, tbl_stores_sensors ss, " |
|
281 | " tbl_points p, tbl_sensors_points sp " |
|
282 | " WHERE st.id = %s AND st.id = ss.store_id AND ss.sensor_id = se.id " |
|
283 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
|
284 | " ORDER BY p.id ", (store['id'],)) |
|
285 | rows_points = cursor_system.fetchall() |
|
286 | if rows_points is not None and len(rows_points) > 0: |
|
287 | for row in rows_points: |
|
288 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
289 | ||
290 | ################################################################################################################ |
|
291 | # Step 5: query associated points |
|
292 | ################################################################################################################ |
|
293 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
294 | " FROM tbl_stores s, tbl_stores_points sp, tbl_points p " |
|
295 | " WHERE s.id = %s AND s.id = sp.store_id AND sp.point_id = p.id " |
|
296 | " ORDER BY p.id ", (store['id'],)) |
|
297 | rows_points = cursor_system.fetchall() |
|
298 | if rows_points is not None and len(rows_points) > 0: |
|
299 | for row in rows_points: |
|
300 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
301 | ||
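# Sketch of an equivalent single round trip for Steps 4 and 5, assuming the same schema.
# UNION ALL keeps a point that is linked both through a sensor and directly, matching the
# two separate appends; the overall ordering would differ from the two ORDER BY p.id queries.
combined_points_query = (
    " SELECT p.id, p.name, p.units, p.object_type "
    " FROM tbl_stores st, tbl_sensors se, tbl_stores_sensors ss, "
    "      tbl_points p, tbl_sensors_points sp "
    " WHERE st.id = %s AND st.id = ss.store_id AND ss.sensor_id = se.id "
    "   AND se.id = sp.sensor_id AND sp.point_id = p.id "
    " UNION ALL "
    " SELECT p.id, p.name, p.units, p.object_type "
    " FROM tbl_stores s, tbl_stores_points sp, tbl_points p "
    " WHERE s.id = %s AND s.id = sp.store_id AND sp.point_id = p.id ")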
302 | ################################################################################################################ |
|
303 | # Step 6: query base period energy carbon dioxide emissions |
|
304 | ################################################################################################################ |
|
305 | base = dict() |
|
306 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
307 | for energy_category_id in energy_category_set: |
|
308 | base[energy_category_id] = dict() |
|
309 | base[energy_category_id]['timestamps'] = list() |
|
310 | base[energy_category_id]['values'] = list() |
|
311 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
312 | ||
313 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
|
314 | " FROM tbl_store_input_category_hourly " |
|
315 | " WHERE store_id = %s " |
|
316 | " AND energy_category_id = %s " |
|
317 | " AND start_datetime_utc >= %s " |
|
318 | " AND start_datetime_utc < %s " |
|
319 | " ORDER BY start_datetime_utc ", |
|
320 | (store['id'], |
|
321 | energy_category_id, |
|
322 | base_start_datetime_utc, |
|
323 | base_end_datetime_utc)) |
|
324 | rows_store_hourly = cursor_carbon.fetchall() |
|
325 | ||
326 | rows_store_periodically = utilities.aggregate_hourly_data_by_period(rows_store_hourly, |
|
327 | base_start_datetime_utc, |
|
328 | base_end_datetime_utc, |
|
329 | period_type) |
|
330 | for row_store_periodically in rows_store_periodically: |
|
331 | current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
332 | timedelta(minutes=timezone_offset) |
|
333 | if period_type == 'hourly': |
|
334 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
335 | elif period_type == 'daily': |
|
336 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
337 | elif period_type == 'weekly': |
|
338 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
339 | elif period_type == 'monthly': |
|
340 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
341 | elif period_type == 'yearly': |
|
342 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
343 | ||
344 | actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1] |
|
345 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
346 | base[energy_category_id]['values'].append(actual_value) |
|
347 | base[energy_category_id]['subtotal'] += actual_value |
|
348 | ||
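# Assumption (sketch, not the library code): utilities.aggregate_hourly_data_by_period is
# expected to return one (period_start_utc, subtotal) row per period covering
# [start, end), so the loop above can attach a local-time label and accumulate the
# subtotal per energy category. A minimal 'daily' version might look like this:
from datetime import timedelta
from decimal import Decimal

def aggregate_daily(rows_hourly, start_utc, end_utc):
    results = []
    day_start = start_utc
    while day_start < end_utc:
        day_end = min(day_start + timedelta(days=1), end_utc)
        subtotal = sum((row[1] for row in rows_hourly if day_start <= row[0] < day_end),
                       Decimal(0.0))
        results.append((day_start, subtotal))
        day_start = day_end
    return results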
349 | ################################################################################################################ |
|
350 | # Step 7: query reporting period energy carbon dioxide emissions |
|
351 | ################################################################################################################ |
|
352 | reporting = dict() |
|
353 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
354 | for energy_category_id in energy_category_set: |
|
355 | reporting[energy_category_id] = dict() |
|
356 | reporting[energy_category_id]['timestamps'] = list() |
|
357 | reporting[energy_category_id]['values'] = list() |
|
358 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
359 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
|
360 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
|
361 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
|
362 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
|
363 | reporting[energy_category_id]['deep'] = Decimal(0.0) |
|
364 | ||
365 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
|
366 | " FROM tbl_store_input_category_hourly " |
|
367 | " WHERE store_id = %s " |
|
368 | " AND energy_category_id = %s " |
|
369 | " AND start_datetime_utc >= %s " |
|
370 | " AND start_datetime_utc < %s " |
|
371 | " ORDER BY start_datetime_utc ", |
|
372 | (store['id'], |
|
373 | energy_category_id, |
|
374 | reporting_start_datetime_utc, |
|
375 | reporting_end_datetime_utc)) |
|
376 | rows_store_hourly = cursor_carbon.fetchall() |
|
377 | ||
378 | rows_store_periodically = utilities.aggregate_hourly_data_by_period(rows_store_hourly, |
|
379 | reporting_start_datetime_utc, |
|
380 | reporting_end_datetime_utc, |
|
381 | period_type) |
|
382 | for row_store_periodically in rows_store_periodically: |
|
383 | current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
384 | timedelta(minutes=timezone_offset) |
|
385 | if period_type == 'hourly': |
|
386 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
387 | elif period_type == 'daily': |
|
388 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
389 | elif period_type == 'weekly': |
|
390 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
391 | elif period_type == 'monthly': |
|
392 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
393 | elif period_type == 'yearly': |
|
394 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
395 | ||
396 | actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1] |
|
397 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
398 | reporting[energy_category_id]['values'].append(actual_value) |
|
399 | reporting[energy_category_id]['subtotal'] += actual_value |
|
400 | ||
401 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(store['cost_center_id'], |
|
402 | energy_category_id, |
|
403 | reporting_start_datetime_utc, |
|
404 | reporting_end_datetime_utc) |
|
405 | for row in rows_store_hourly: |
|
406 | peak_type = energy_category_tariff_dict.get(row[0], None) |
|
407 | if peak_type == 'toppeak': |
|
408 | reporting[energy_category_id]['toppeak'] += row[1] |
|
409 | elif peak_type == 'onpeak': |
|
410 | reporting[energy_category_id]['onpeak'] += row[1] |
|
411 | elif peak_type == 'midpeak': |
|
412 | reporting[energy_category_id]['midpeak'] += row[1] |
|
413 | elif peak_type == 'offpeak': |
|
414 | reporting[energy_category_id]['offpeak'] += row[1] |
|
415 | elif peak_type == 'deep': |
|
416 | reporting[energy_category_id]['deep'] += row[1] |
|
417 | ||
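# Assumption (sketch): utilities.get_energy_category_peak_types is used above as a dict
# keyed by hourly start_datetime_utc and mapping to a tariff peak type
# ('toppeak'/'onpeak'/'midpeak'/'offpeak'/'deep'). The bucketing then reduces to:
from collections import defaultdict
from decimal import Decimal

def bucket_by_peak_type(rows_hourly, peak_type_by_start_utc):
    """Sum hourly actual values into tariff peak-type buckets (illustrative only)."""
    buckets = defaultdict(lambda: Decimal(0.0))
    for start_utc, value in rows_hourly:
        peak_type = peak_type_by_start_utc.get(start_utc)
        if peak_type is not None and value is not None:
            buckets[peak_type] += value
    return buckets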
418 | ################################################################################################################ |
|
419 | # Step 8: query tariff data |
|
420 | ################################################################################################################ |
|
421 | parameters_data = dict() |
|
422 | parameters_data['names'] = list() |
|
423 | parameters_data['timestamps'] = list() |
|
424 | parameters_data['values'] = list() |
|
425 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
426 | and not is_quick_mode: |
|
427 | for energy_category_id in energy_category_set: |
|
428 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(store['cost_center_id'], |
|
429 | energy_category_id, |
|
430 | reporting_start_datetime_utc, |
|
431 | reporting_end_datetime_utc) |
|
432 | tariff_timestamp_list = list() |
|
433 | tariff_value_list = list() |
|
434 | for k, v in energy_category_tariff_dict.items(): |
|
435 | # convert k from UTC to local time |
|
436 | k = k + timedelta(minutes=timezone_offset) |
|
437 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
438 | tariff_value_list.append(v) |
|
439 | ||
440 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
441 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
442 | parameters_data['values'].append(tariff_value_list) |
|
443 | ||
444 | ################################################################################################################ |
|
445 | # Step 9: query associated sensors and points data |
|
446 | ################################################################################################################ |
|
447 | if not is_quick_mode: |
|
448 | for point in point_list: |
|
449 | point_values = [] |
|
450 | point_timestamps = [] |
|
451 | if point['object_type'] == 'ENERGY_VALUE': |
|
452 | query = (" SELECT utc_date_time, actual_value " |
|
453 | " FROM tbl_energy_value " |
|
454 | " WHERE point_id = %s " |
|
455 | " AND utc_date_time BETWEEN %s AND %s " |
|
456 | " ORDER BY utc_date_time ") |
|
457 | cursor_historical.execute(query, (point['id'], |
|
458 | reporting_start_datetime_utc, |
|
459 | reporting_end_datetime_utc)) |
|
460 | rows = cursor_historical.fetchall() |
|
461 | ||
462 | if rows is not None and len(rows) > 0: |
|
463 | for row in rows: |
|
464 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
465 | timedelta(minutes=timezone_offset) |
|
466 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
467 | point_timestamps.append(current_datetime) |
|
468 | point_values.append(row[1]) |
|
469 | elif point['object_type'] == 'ANALOG_VALUE': |
|
470 | query = (" SELECT utc_date_time, actual_value " |
|
471 | " FROM tbl_analog_value " |
|
472 | " WHERE point_id = %s " |
|
473 | " AND utc_date_time BETWEEN %s AND %s " |
|
474 | " ORDER BY utc_date_time ") |
|
475 | cursor_historical.execute(query, (point['id'], |
|
476 | reporting_start_datetime_utc, |
|
477 | reporting_end_datetime_utc)) |
|
478 | rows = cursor_historical.fetchall() |
|
479 | ||
480 | if rows is not None and len(rows) > 0: |
|
481 | for row in rows: |
|
482 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
483 | timedelta(minutes=timezone_offset) |
|
484 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
485 | point_timestamps.append(current_datetime) |
|
486 | point_values.append(row[1]) |
|
487 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
488 | query = (" SELECT utc_date_time, actual_value " |
|
489 | " FROM tbl_digital_value " |
|
490 | " WHERE point_id = %s " |
|
491 | " AND utc_date_time BETWEEN %s AND %s " |
|
492 | " ORDER BY utc_date_time ") |
|
493 | cursor_historical.execute(query, (point['id'], |
|
494 | reporting_start_datetime_utc, |
|
495 | reporting_end_datetime_utc)) |
|
496 | rows = cursor_historical.fetchall() |
|
497 | ||
498 | if rows is not None and len(rows) > 0: |
|
499 | for row in rows: |
|
500 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
501 | timedelta(minutes=timezone_offset) |
|
502 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
503 | point_timestamps.append(current_datetime) |
|
504 | point_values.append(row[1]) |
|
505 | ||
506 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
507 | parameters_data['timestamps'].append(point_timestamps) |
|
508 | parameters_data['values'].append(point_values) |
|
509 | ||
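# The three object_type branches above differ only in the table queried. A sketch of an
# equivalent lookup, assuming the same historical table names:
table_by_object_type = {
    'ENERGY_VALUE': 'tbl_energy_value',
    'ANALOG_VALUE': 'tbl_analog_value',
    'DIGITAL_VALUE': 'tbl_digital_value',
}
# parameters_data then holds three parallel lists: one name, one timestamp list and one
# value list per tariff curve (Step 8) or point (Step 9).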
510 | ################################################################################################################ |
|
511 | # Step 10: construct the report |
|
512 | ################################################################################################################ |
|
513 | if cursor_system: |
|
514 | cursor_system.close() |
|
515 | if cnx_system: |
|
516 | cnx_system.close() |
|
517 | ||
518 | if cursor_carbon: |
|
519 | cursor_carbon.close() |
|
520 | if cnx_carbon: |
|
521 | cnx_carbon.close() |
|
522 | ||
523 | if cursor_historical: |
|
524 | cursor_historical.close() |
|
525 | if cnx_historical: |
|
526 | cnx_historical.close() |
|
527 | ||
528 | result = dict() |
|
529 | ||
530 | result['store'] = dict() |
|
531 | result['store']['name'] = store['name'] |
|
532 | result['store']['area'] = store['area'] |
|
533 | ||
534 | result['base_period'] = dict() |
|
535 | result['base_period']['names'] = list() |
|
536 | result['base_period']['units'] = list() |
|
537 | result['base_period']['timestamps'] = list() |
|
538 | result['base_period']['values'] = list() |
|
539 | result['base_period']['subtotals'] = list() |
|
540 | result['base_period']['total'] = Decimal(0.0) |
|
541 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
542 | for energy_category_id in energy_category_set: |
|
543 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
544 | result['base_period']['units'].append('KG') |
|
545 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
546 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
547 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
548 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
|
549 | ||
550 | result['reporting_period'] = dict() |
|
551 | result['reporting_period']['names'] = list() |
|
552 | result['reporting_period']['energy_category_ids'] = list() |
|
553 | result['reporting_period']['units'] = list() |
|
554 | result['reporting_period']['timestamps'] = list() |
|
555 | result['reporting_period']['values'] = list() |
|
556 | result['reporting_period']['rates'] = list() |
|
557 | result['reporting_period']['subtotals'] = list() |
|
558 | result['reporting_period']['subtotals_per_unit_area'] = list() |
|
559 | result['reporting_period']['toppeaks'] = list() |
|
560 | result['reporting_period']['onpeaks'] = list() |
|
561 | result['reporting_period']['midpeaks'] = list() |
|
562 | result['reporting_period']['offpeaks'] = list() |
|
563 | result['reporting_period']['deeps'] = list() |
|
564 | result['reporting_period']['increment_rates'] = list() |
|
565 | result['reporting_period']['total'] = Decimal(0.0) |
|
566 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
|
567 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
|
568 | result['reporting_period']['total_unit'] = 'KG' |
|
569 | ||
570 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
571 | for energy_category_id in energy_category_set: |
|
572 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
573 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
574 | result['reporting_period']['units'].append('KG') |
|
575 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
576 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
577 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
578 | result['reporting_period']['subtotals_per_unit_area'].append( |
|
579 | reporting[energy_category_id]['subtotal'] / store['area'] if store['area'] > 0.0 else None) |
|
580 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
|
581 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
|
582 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
|
583 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
|
584 | result['reporting_period']['deeps'].append(reporting[energy_category_id]['deep']) |
|
585 | result['reporting_period']['increment_rates'].append( |
|
586 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
|
587 | base[energy_category_id]['subtotal'] |
|
588 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
|
589 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
|
590 | ||
591 | rate = list() |
|
592 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
593 | if index < len(base[energy_category_id]['values']) \ |
|
594 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
595 | rate.append((value - base[energy_category_id]['values'][index]) |
|
596 | / base[energy_category_id]['values'][index]) |
|
597 | else: |
|
598 | rate.append(None) |
|
599 | result['reporting_period']['rates'].append(rate) |
|
600 | ||
601 | result['reporting_period']['total_per_unit_area'] = \ |
|
602 | result['reporting_period']['total'] / store['area'] if store['area'] > 0.0 else None |
|
603 | ||
604 | result['reporting_period']['total_increment_rate'] = \ |
|
605 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
|
606 | result['base_period']['total'] \ |
|
607 | if result['base_period']['total'] > Decimal(0.0) else None |
|
608 | ||
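# Worked example of the increment-rate formula used above (illustrative numbers):
#     increment_rate = (reporting_subtotal - base_subtotal) / base_subtotal
from decimal import Decimal

base_subtotal = Decimal('1000.0')
reporting_subtotal = Decimal('1150.0')
increment_rate = (reporting_subtotal - base_subtotal) / base_subtotal  # Decimal('0.15'), i.e. +15%
# When the base subtotal is zero the handler reports None instead of dividing by zero.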
609 | result['parameters'] = { |
|
610 | "names": parameters_data['names'], |
|
611 | "timestamps": parameters_data['timestamps'], |
|
612 | "values": parameters_data['values'] |
|
613 | } |
|
614 | # export result to Excel file and then encode the file to base64 string |
|
615 | if not is_quick_mode: |
|
616 | result['excel_bytes_base64'] = excelexporters.storecarbon.export(result, |
|
617 | store['name'], |
|
618 | base_period_start_datetime_local, |
|
619 | base_period_end_datetime_local, |
|
620 | reporting_period_start_datetime_local, |
|
621 | reporting_period_end_datetime_local, |
|
622 | period_type, |
|
623 | language) |
|
624 | resp.text = json.dumps(result) |
|
625 |
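# Client-side sketch (assumption: excel_bytes_base64 carries a base64-encoded .xlsx
# workbook produced by excelexporters.storecarbon.export). The helper name and filename
# below are hypothetical.
import base64
import json

def save_report_excel(response_text, filename='storecarbon.xlsx'):
    report = json.loads(response_text)
    excel_b64 = report.get('excel_bytes_base64')
    if excel_b64:
        with open(filename, 'wb') as f:
            f.write(base64.b64decode(excel_b64))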