Total Complexity | 109
Total Lines | 623
Duplicated Lines | 98.23 %
Changes | 0
Duplicate code is one of the most pungent code smells. A common rule of thumb is to restructure code once it is duplicated in three or more places.
Typical duplication problems, and the refactorings that address them, are: duplication within a single class (Extract Method), duplication across sibling classes (Extract Method plus Pull Up Method), and duplication across unrelated classes (Extract Class or Extract Superclass).
Complex classes like reports.shopfloorstatistics often do a lot of different things. To break such a class down, we need to identify a cohesive component within the class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster.
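Most of the duplication flagged above comes from the near-identical handling of the base period and the reporting period inside Reporting.on_get (Steps 6 and 7 in the listing below), a pattern that also recurs in the sibling report modules. As a minimal sketch of the Extract Method direction, assuming a hypothetical helper named aggregate_period and reusing utilities.statistics_hourly_data_by_period exactly as the listing does, the shared block could be factored out like this:

    from datetime import timedelta, timezone
    from decimal import Decimal

    from core import utilities


    def aggregate_period(cursor_energy, shopfloor_id, energy_category_id,
                         start_datetime_utc, end_datetime_utc, period_type, timezone_offset):
        """Hypothetical helper: query hourly input for one energy category over one
        period, then aggregate it the same way Steps 6 and 7 below do."""
        cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                              " FROM tbl_shopfloor_input_category_hourly "
                              " WHERE shopfloor_id = %s "
                              " AND energy_category_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s "
                              " ORDER BY start_datetime_utc ",
                              (shopfloor_id, energy_category_id, start_datetime_utc, end_datetime_utc))
        rows_hourly = cursor_energy.fetchall()

        rows_periodically, mean, median, minimum, maximum, stdev, variance = \
            utilities.statistics_hourly_data_by_period(rows_hourly, start_datetime_utc,
                                                       end_datetime_utc, period_type)

        # format timestamps in local time and accumulate the subtotal,
        # exactly as the duplicated loops in Steps 6 and 7 do
        period_format = {'hourly': '%Y-%m-%dT%H:%M:%S', 'daily': '%Y-%m-%d',
                         'monthly': '%Y-%m', 'yearly': '%Y'}[period_type]
        timestamps, values, subtotal = list(), list(), Decimal(0.0)
        for row in rows_periodically:
            current_local = row[0].replace(tzinfo=timezone.utc) + timedelta(minutes=timezone_offset)
            timestamps.append(current_local.strftime(period_format))
            actual_value = Decimal(0.0) if row[1] is None else row[1]
            values.append(actual_value)
            subtotal += actual_value

        return {'timestamps': timestamps, 'values': values, 'subtotal': subtotal,
                'mean': mean, 'median': median, 'minimum': minimum, 'maximum': maximum,
                'stdev': stdev, 'variance': variance}

With such a helper, Steps 6 and 7 would each shrink to one call per energy category, for example base[energy_category_id] = aggregate_period(cursor_energy, shopfloor['id'], energy_category_id, base_start_datetime_utc, base_end_datetime_utc, period_type, timezone_offset), and the same helper could be shared by the other report endpoints that currently repeat this block. The full source under review follows.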
import falcon
import simplejson as json
import mysql.connector
import config
from datetime import datetime, timedelta, timezone
from core import utilities
from decimal import Decimal
import excelexporters.shopfloorstatistics


class Reporting:
    @staticmethod
    def __init__():
        pass

    @staticmethod
    def on_options(req, resp):
        resp.status = falcon.HTTP_200

    ####################################################################################################################
    # PROCEDURES
    # Step 1: valid parameters
    # Step 2: query the shopfloor
    # Step 3: query energy categories
    # Step 4: query associated sensors
    # Step 5: query associated points
    # Step 6: query base period energy input
    # Step 7: query reporting period energy input
    # Step 8: query tariff data
    # Step 9: query associated sensors and points data
    # Step 10: construct the report
    ####################################################################################################################
    @staticmethod
    def on_get(req, resp):
        print(req.params)
        shopfloor_id = req.params.get('shopfloorid')
        period_type = req.params.get('periodtype')
        base_start_datetime_local = req.params.get('baseperiodstartdatetime')
        base_end_datetime_local = req.params.get('baseperiodenddatetime')
        reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')

        ################################################################################################################
        # Step 1: valid parameters
        ################################################################################################################
        if shopfloor_id is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_SHOPFLOOR_ID')
        else:
            shopfloor_id = str.strip(shopfloor_id)
            if not shopfloor_id.isdigit() or int(shopfloor_id) <= 0:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_SHOPFLOOR_ID')

        if period_type is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')
        else:
            period_type = str.strip(period_type)
            if period_type not in ['hourly', 'daily', 'monthly', 'yearly']:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')

        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        base_start_datetime_utc = None
        if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
            base_start_datetime_local = str.strip(base_start_datetime_local)
            try:
                base_start_datetime_utc = datetime.strptime(base_start_datetime_local,
                                                            '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_START_DATETIME")

        base_end_datetime_utc = None
        if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
            base_end_datetime_local = str.strip(base_end_datetime_local)
            try:
                base_end_datetime_utc = datetime.strptime(base_end_datetime_local,
                                                          '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_END_DATETIME")

        if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
                base_start_datetime_utc >= base_end_datetime_utc:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_BASE_PERIOD_END_DATETIME')

        if reporting_start_datetime_local is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        else:
            reporting_start_datetime_local = str.strip(reporting_start_datetime_local)
            try:
                reporting_start_datetime_utc = datetime.strptime(reporting_start_datetime_local,
                                                                 '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_START_DATETIME")

        if reporting_end_datetime_local is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        else:
            reporting_end_datetime_local = str.strip(reporting_end_datetime_local)
            try:
                reporting_end_datetime_utc = datetime.strptime(reporting_end_datetime_local,
                                                               '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_END_DATETIME")

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        ################################################################################################################
        # Step 2: query the shopfloor
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_energy = mysql.connector.connect(**config.myems_energy_db)
        cursor_energy = cnx_energy.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        cursor_system.execute(" SELECT id, name, area, cost_center_id "
                              " FROM tbl_shopfloors "
                              " WHERE id = %s ", (shopfloor_id,))
        row_shopfloor = cursor_system.fetchone()
        if row_shopfloor is None:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.disconnect()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.disconnect()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND', description='API.SHOPFLOOR_NOT_FOUND')

        shopfloor = dict()
        shopfloor['id'] = row_shopfloor[0]
        shopfloor['name'] = row_shopfloor[1]
        shopfloor['area'] = row_shopfloor[2]
        shopfloor['cost_center_id'] = row_shopfloor[3]

        ################################################################################################################
        # Step 3: query energy categories
        ################################################################################################################
        energy_category_set = set()
        # query energy categories in base period
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_shopfloor_input_category_hourly "
                              " WHERE shopfloor_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (shopfloor['id'], base_start_datetime_utc, base_end_datetime_utc))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

        # query energy categories in reporting period
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_shopfloor_input_category_hourly "
                              " WHERE shopfloor_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (shopfloor['id'], reporting_start_datetime_utc, reporting_end_datetime_utc))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

        # query all energy categories in base period and reporting period
        cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                              " FROM tbl_energy_categories "
                              " ORDER BY id ", )
        rows_energy_categories = cursor_system.fetchall()
        if rows_energy_categories is None or len(rows_energy_categories) == 0:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.disconnect()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.disconnect()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.ENERGY_CATEGORY_NOT_FOUND')
        energy_category_dict = dict()
        for row_energy_category in rows_energy_categories:
            if row_energy_category[0] in energy_category_set:
                energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                                "unit_of_measure": row_energy_category[2],
                                                                "kgce": row_energy_category[3],
                                                                "kgco2e": row_energy_category[4]}

        ################################################################################################################
        # Step 4: query associated sensors
        ################################################################################################################
        point_list = list()
        cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                              " FROM tbl_shopfloors st, tbl_sensors se, tbl_shopfloors_sensors ss, "
                              " tbl_points p, tbl_sensors_points sp "
                              " WHERE st.id = %s AND st.id = ss.shopfloor_id AND ss.sensor_id = se.id "
                              " AND se.id = sp.sensor_id AND sp.point_id = p.id "
                              " ORDER BY p.id ", (shopfloor['id'],))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 5: query associated points
        ################################################################################################################
        cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                              " FROM tbl_shopfloors s, tbl_shopfloors_points sp, tbl_points p "
                              " WHERE s.id = %s AND s.id = sp.shopfloor_id AND sp.point_id = p.id "
                              " ORDER BY p.id ", (shopfloor['id'],))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 6: query base period energy input
        ################################################################################################################
        base = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                base[energy_category_id] = dict()
                base[energy_category_id]['timestamps'] = list()
                base[energy_category_id]['values'] = list()
                base[energy_category_id]['subtotal'] = Decimal(0.0)
                base[energy_category_id]['mean'] = None
                base[energy_category_id]['median'] = None
                base[energy_category_id]['minimum'] = None
                base[energy_category_id]['maximum'] = None
                base[energy_category_id]['stdev'] = None
                base[energy_category_id]['variance'] = None

                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM tbl_shopfloor_input_category_hourly "
                                      " WHERE shopfloor_id = %s "
                                      " AND energy_category_id = %s "
                                      " AND start_datetime_utc >= %s "
                                      " AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (shopfloor['id'],
                                       energy_category_id,
                                       base_start_datetime_utc,
                                       base_end_datetime_utc))
                rows_shopfloor_hourly = cursor_energy.fetchall()

                rows_shopfloor_periodically, \
                    base[energy_category_id]['mean'], \
                    base[energy_category_id]['median'], \
                    base[energy_category_id]['minimum'], \
                    base[energy_category_id]['maximum'], \
                    base[energy_category_id]['stdev'], \
                    base[energy_category_id]['variance'] = \
                    utilities.statistics_hourly_data_by_period(rows_shopfloor_hourly,
                                                               base_start_datetime_utc,
                                                               base_end_datetime_utc,
                                                               period_type)

                for row_shopfloor_periodically in rows_shopfloor_periodically:
                    current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.strftime('%Y-%m')
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.strftime('%Y')

                    actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \
                        else row_shopfloor_periodically[1]
                    base[energy_category_id]['timestamps'].append(current_datetime)
                    base[energy_category_id]['values'].append(actual_value)
                    base[energy_category_id]['subtotal'] += actual_value

        ################################################################################################################
        # Step 7: query reporting period energy input
        ################################################################################################################
        reporting = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                reporting[energy_category_id] = dict()
                reporting[energy_category_id]['timestamps'] = list()
                reporting[energy_category_id]['values'] = list()
                reporting[energy_category_id]['subtotal'] = Decimal(0.0)
                reporting[energy_category_id]['mean'] = None
                reporting[energy_category_id]['median'] = None
                reporting[energy_category_id]['minimum'] = None
                reporting[energy_category_id]['maximum'] = None
                reporting[energy_category_id]['stdev'] = None
                reporting[energy_category_id]['variance'] = None

                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM tbl_shopfloor_input_category_hourly "
                                      " WHERE shopfloor_id = %s "
                                      " AND energy_category_id = %s "
                                      " AND start_datetime_utc >= %s "
                                      " AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (shopfloor['id'],
                                       energy_category_id,
                                       reporting_start_datetime_utc,
                                       reporting_end_datetime_utc))
                rows_shopfloor_hourly = cursor_energy.fetchall()

                rows_shopfloor_periodically, \
                    reporting[energy_category_id]['mean'], \
                    reporting[energy_category_id]['median'], \
                    reporting[energy_category_id]['minimum'], \
                    reporting[energy_category_id]['maximum'], \
                    reporting[energy_category_id]['stdev'], \
                    reporting[energy_category_id]['variance'] = \
                    utilities.statistics_hourly_data_by_period(rows_shopfloor_hourly,
                                                               reporting_start_datetime_utc,
                                                               reporting_end_datetime_utc,
                                                               period_type)

                for row_shopfloor_periodically in rows_shopfloor_periodically:
                    current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.strftime('%Y-%m')
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.strftime('%Y')

                    actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \
                        else row_shopfloor_periodically[1]
                    reporting[energy_category_id]['timestamps'].append(current_datetime)
                    reporting[energy_category_id]['values'].append(actual_value)
                    reporting[energy_category_id]['subtotal'] += actual_value

        ################################################################################################################
        # Step 8: query tariff data
        ################################################################################################################
        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                energy_category_tariff_dict = utilities.get_energy_category_tariffs(shopfloor['cost_center_id'],
                                                                                    energy_category_id,
                                                                                    reporting_start_datetime_utc,
                                                                                    reporting_end_datetime_utc)
                tariff_timestamp_list = list()
                tariff_value_list = list()
                for k, v in energy_category_tariff_dict.items():
                    # convert k from utc to local
                    k = k + timedelta(minutes=timezone_offset)
                    tariff_timestamp_list.append(k.isoformat()[0:19])
                    tariff_value_list.append(v)

                parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name'])
                parameters_data['timestamps'].append(tariff_timestamp_list)
                parameters_data['values'].append(tariff_value_list)

        ################################################################################################################
        # Step 9: query associated sensors and points data
        ################################################################################################################
        for point in point_list:
            point_values = []
            point_timestamps = []
            if point['object_type'] == 'ANALOG_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_analog_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                        point_timestamps.append(current_datetime)
                        point_values.append(row[1])

            elif point['object_type'] == 'ENERGY_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_energy_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                        point_timestamps.append(current_datetime)
                        point_values.append(row[1])
            elif point['object_type'] == 'DIGITAL_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_digital_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                        point_timestamps.append(current_datetime)
                        point_values.append(row[1])

            parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
            parameters_data['timestamps'].append(point_timestamps)
            parameters_data['values'].append(point_values)

        ################################################################################################################
        # Step 10: construct the report
        ################################################################################################################
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.disconnect()

        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.disconnect()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.disconnect()

        result = dict()

        result['shopfloor'] = dict()
        result['shopfloor']['name'] = shopfloor['name']
        result['shopfloor']['area'] = shopfloor['area']

        result['base_period'] = dict()
        result['base_period']['names'] = list()
        result['base_period']['units'] = list()
        result['base_period']['timestamps'] = list()
        result['base_period']['values'] = list()
        result['base_period']['subtotals'] = list()
        result['base_period']['means'] = list()
        result['base_period']['medians'] = list()
        result['base_period']['minimums'] = list()
        result['base_period']['maximums'] = list()
        result['base_period']['stdevs'] = list()
        result['base_period']['variances'] = list()

        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                result['base_period']['names'].append(energy_category_dict[energy_category_id]['name'])
                result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
                result['base_period']['timestamps'].append(base[energy_category_id]['timestamps'])
                result['base_period']['values'].append(base[energy_category_id]['values'])
                result['base_period']['subtotals'].append(base[energy_category_id]['subtotal'])
                result['base_period']['means'].append(base[energy_category_id]['mean'])
                result['base_period']['medians'].append(base[energy_category_id]['median'])
                result['base_period']['minimums'].append(base[energy_category_id]['minimum'])
                result['base_period']['maximums'].append(base[energy_category_id]['maximum'])
                result['base_period']['stdevs'].append(base[energy_category_id]['stdev'])
                result['base_period']['variances'].append(base[energy_category_id]['variance'])

        result['reporting_period'] = dict()
        result['reporting_period']['names'] = list()
        result['reporting_period']['energy_category_ids'] = list()
        result['reporting_period']['units'] = list()
        result['reporting_period']['timestamps'] = list()
        result['reporting_period']['values'] = list()
        result['reporting_period']['subtotals'] = list()
        result['reporting_period']['means'] = list()
        result['reporting_period']['means_per_unit_area'] = list()
        result['reporting_period']['means_increment_rate'] = list()
        result['reporting_period']['medians'] = list()
        result['reporting_period']['medians_per_unit_area'] = list()
        result['reporting_period']['medians_increment_rate'] = list()
        result['reporting_period']['minimums'] = list()
        result['reporting_period']['minimums_per_unit_area'] = list()
        result['reporting_period']['minimums_increment_rate'] = list()
        result['reporting_period']['maximums'] = list()
        result['reporting_period']['maximums_per_unit_area'] = list()
        result['reporting_period']['maximums_increment_rate'] = list()
        result['reporting_period']['stdevs'] = list()
        result['reporting_period']['stdevs_per_unit_area'] = list()
        result['reporting_period']['stdevs_increment_rate'] = list()
        result['reporting_period']['variances'] = list()
        result['reporting_period']['variances_per_unit_area'] = list()
        result['reporting_period']['variances_increment_rate'] = list()

        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name'])
                result['reporting_period']['energy_category_ids'].append(energy_category_id)
                result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
                result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps'])
                result['reporting_period']['values'].append(reporting[energy_category_id]['values'])
                result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal'])
                result['reporting_period']['means'].append(reporting[energy_category_id]['mean'])
                result['reporting_period']['means_per_unit_area'].append(
                    reporting[energy_category_id]['mean'] / shopfloor['area']
                    if reporting[energy_category_id]['mean'] is not None and
                    shopfloor['area'] is not None and
                    shopfloor['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['means_increment_rate'].append(
                    (reporting[energy_category_id]['mean'] - base[energy_category_id]['mean']) /
                    base[energy_category_id]['mean'] if (base[energy_category_id]['mean'] is not None and
                                                         base[energy_category_id]['mean'] > Decimal(0.0))
                    else None)
                result['reporting_period']['medians'].append(reporting[energy_category_id]['median'])
                result['reporting_period']['medians_per_unit_area'].append(
                    reporting[energy_category_id]['median'] / shopfloor['area']
                    if reporting[energy_category_id]['median'] is not None and
                    shopfloor['area'] is not None and
                    shopfloor['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['medians_increment_rate'].append(
                    (reporting[energy_category_id]['median'] - base[energy_category_id]['median']) /
                    base[energy_category_id]['median'] if (base[energy_category_id]['median'] is not None and
                                                           base[energy_category_id]['median'] > Decimal(0.0))
                    else None)
                result['reporting_period']['minimums'].append(reporting[energy_category_id]['minimum'])
                result['reporting_period']['minimums_per_unit_area'].append(
                    reporting[energy_category_id]['minimum'] / shopfloor['area']
                    if reporting[energy_category_id]['minimum'] is not None and
                    shopfloor['area'] is not None and
                    shopfloor['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['minimums_increment_rate'].append(
                    (reporting[energy_category_id]['minimum'] - base[energy_category_id]['minimum']) /
                    base[energy_category_id]['minimum'] if (base[energy_category_id]['minimum'] is not None and
                                                            base[energy_category_id]['minimum'] > Decimal(0.0))
                    else None)
                result['reporting_period']['maximums'].append(reporting[energy_category_id]['maximum'])
                result['reporting_period']['maximums_per_unit_area'].append(
                    reporting[energy_category_id]['maximum'] / shopfloor['area']
                    if reporting[energy_category_id]['maximum'] is not None and
                    shopfloor['area'] is not None and
                    shopfloor['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['maximums_increment_rate'].append(
                    (reporting[energy_category_id]['maximum'] - base[energy_category_id]['maximum']) /
                    base[energy_category_id]['maximum'] if (base[energy_category_id]['maximum'] is not None and
                                                            base[energy_category_id]['maximum'] > Decimal(0.0))
                    else None)
                result['reporting_period']['stdevs'].append(reporting[energy_category_id]['stdev'])
                result['reporting_period']['stdevs_per_unit_area'].append(
                    reporting[energy_category_id]['stdev'] / shopfloor['area']
                    if reporting[energy_category_id]['stdev'] is not None and
                    shopfloor['area'] is not None and
                    shopfloor['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['stdevs_increment_rate'].append(
                    (reporting[energy_category_id]['stdev'] - base[energy_category_id]['stdev']) /
                    base[energy_category_id]['stdev'] if (base[energy_category_id]['stdev'] is not None and
                                                          base[energy_category_id]['stdev'] > Decimal(0.0))
                    else None)
                result['reporting_period']['variances'].append(reporting[energy_category_id]['variance'])
                result['reporting_period']['variances_per_unit_area'].append(
                    reporting[energy_category_id]['variance'] / shopfloor['area']
                    if reporting[energy_category_id]['variance'] is not None and
                    shopfloor['area'] is not None and
                    shopfloor['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['variances_increment_rate'].append(
                    (reporting[energy_category_id]['variance'] - base[energy_category_id]['variance']) /
                    base[energy_category_id]['variance'] if (base[energy_category_id]['variance'] is not None and
                                                             base[energy_category_id]['variance'] > Decimal(0.0))
                    else None)

        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }

        # export result to Excel file and then encode the file to base64 string
        result['excel_bytes_base64'] = excelexporters.shopfloorstatistics.export(result,
                                                                                 shopfloor['name'],
                                                                                 reporting_start_datetime_local,
                                                                                 reporting_end_datetime_local,
                                                                                 period_type)

        resp.body = json.dumps(result)
623 |