Conditions | 114 |
Total Lines | 538 |
Code Lines | 403 |
Lines | 538 |
Ratio | 100 % |
Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include:
If many parameters/temporary variables are present:
Complex methods like reports.shopfloorcarbon.Reporting.on_get() often do a lot of different things. To break such a method down, we need to identify a cohesive component within it. A common approach to find such a component is to look for fields/methods that share the same prefixes, or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
1 | import re |
||
35 | @staticmethod |
||
36 | def on_get(req, resp): |
||
37 | print(req.params) |
||
38 | shopfloor_id = req.params.get('shopfloorid') |
||
39 | shopfloor_uuid = req.params.get('shopflooruuid') |
||
40 | period_type = req.params.get('periodtype') |
||
41 | base_start_datetime_local = req.params.get('baseperiodstartdatetime') |
||
42 | base_end_datetime_local = req.params.get('baseperiodenddatetime') |
||
43 | reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
||
44 | reporting_end_datetime_local = req.params.get('reportingperiodenddatetime') |
||
45 | |||
46 | ################################################################################################################ |
||
47 | # Step 1: valid parameters |
||
48 | ################################################################################################################ |
||
49 | if shopfloor_id is None and shopfloor_uuid is None: |
||
50 | raise falcon.HTTPError(falcon.HTTP_400, |
||
51 | title='API.BAD_REQUEST', |
||
52 | description='API.INVALID_SHOPFLOOR_ID') |
||
53 | |||
54 | if shopfloor_id is not None: |
||
55 | shopfloor_id = str.strip(shopfloor_id) |
||
56 | if not shopfloor_id.isdigit() or int(shopfloor_id) <= 0: |
||
57 | raise falcon.HTTPError(falcon.HTTP_400, |
||
58 | title='API.BAD_REQUEST', |
||
59 | description='API.INVALID_SHOPFLOOR_ID') |
||
60 | |||
61 | if shopfloor_uuid is not None: |
||
62 | regex = re.compile('^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
||
63 | match = regex.match(str.strip(shopfloor_uuid)) |
||
64 | if not bool(match): |
||
65 | raise falcon.HTTPError(falcon.HTTP_400, |
||
66 | title='API.BAD_REQUEST', |
||
67 | description='API.INVALID_SHOPFLOOR_UUID') |
||
68 | |||
69 | if period_type is None: |
||
70 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE') |
||
71 | else: |
||
72 | period_type = str.strip(period_type) |
||
73 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
||
74 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE') |
||
75 | |||
76 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
||
77 | if config.utc_offset[0] == '-': |
||
78 | timezone_offset = -timezone_offset |
||
79 | |||
80 | base_start_datetime_utc = None |
||
81 | if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0: |
||
82 | base_start_datetime_local = str.strip(base_start_datetime_local) |
||
83 | try: |
||
84 | base_start_datetime_utc = datetime.strptime(base_start_datetime_local, |
||
85 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
||
86 | timedelta(minutes=timezone_offset) |
||
87 | except ValueError: |
||
88 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
89 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
||
90 | |||
91 | base_end_datetime_utc = None |
||
92 | if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0: |
||
93 | base_end_datetime_local = str.strip(base_end_datetime_local) |
||
94 | try: |
||
95 | base_end_datetime_utc = datetime.strptime(base_end_datetime_local, |
||
96 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
||
97 | timedelta(minutes=timezone_offset) |
||
98 | except ValueError: |
||
99 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
100 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
||
101 | |||
102 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
||
103 | base_start_datetime_utc >= base_end_datetime_utc: |
||
104 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
105 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
||
106 | |||
107 | if reporting_start_datetime_local is None: |
||
108 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
109 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
||
110 | else: |
||
111 | reporting_start_datetime_local = str.strip(reporting_start_datetime_local) |
||
112 | try: |
||
113 | reporting_start_datetime_utc = datetime.strptime(reporting_start_datetime_local, |
||
114 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
||
115 | timedelta(minutes=timezone_offset) |
||
116 | except ValueError: |
||
117 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
118 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
||
119 | |||
120 | if reporting_end_datetime_local is None: |
||
121 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
122 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
||
123 | else: |
||
124 | reporting_end_datetime_local = str.strip(reporting_end_datetime_local) |
||
125 | try: |
||
126 | reporting_end_datetime_utc = datetime.strptime(reporting_end_datetime_local, |
||
127 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
||
128 | timedelta(minutes=timezone_offset) |
||
129 | except ValueError: |
||
130 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
131 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
||
132 | |||
133 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
||
134 | raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', |
||
135 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
||
136 | |||
137 | ################################################################################################################ |
||
138 | # Step 2: query the shopfloor |
||
139 | ################################################################################################################ |
||
140 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
||
141 | cursor_system = cnx_system.cursor() |
||
142 | |||
143 | cnx_carbon = mysql.connector.connect(**config.myems_carbon_db) |
||
144 | cursor_carbon = cnx_carbon.cursor() |
||
145 | |||
146 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
||
147 | cursor_historical = cnx_historical.cursor() |
||
148 | |||
149 | if shopfloor_id is not None: |
||
150 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
||
151 | " FROM tbl_shopfloors " |
||
152 | " WHERE id = %s ", (shopfloor_id,)) |
||
153 | row_shopfloor = cursor_system.fetchone() |
||
154 | elif shopfloor_uuid is not None: |
||
155 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
||
156 | " FROM tbl_shopfloors " |
||
157 | " WHERE uuid = %s ", (shopfloor_uuid,)) |
||
158 | row_shopfloor = cursor_system.fetchone() |
||
159 | |||
160 | if row_shopfloor is None: |
||
161 | if cursor_system: |
||
162 | cursor_system.close() |
||
163 | if cnx_system: |
||
164 | cnx_system.close() |
||
165 | |||
166 | if cursor_carbon: |
||
167 | cursor_carbon.close() |
||
168 | if cnx_carbon: |
||
169 | cnx_carbon.close() |
||
170 | |||
171 | if cursor_historical: |
||
172 | cursor_historical.close() |
||
173 | if cnx_historical: |
||
174 | cnx_historical.close() |
||
175 | raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND', description='API.SHOPFLOOR_NOT_FOUND') |
||
176 | |||
177 | shopfloor = dict() |
||
178 | shopfloor['id'] = row_shopfloor[0] |
||
179 | shopfloor['name'] = row_shopfloor[1] |
||
180 | shopfloor['area'] = row_shopfloor[2] |
||
181 | shopfloor['cost_center_id'] = row_shopfloor[3] |
||
182 | |||
183 | ################################################################################################################ |
||
184 | # Step 3: query energy categories |
||
185 | ################################################################################################################ |
||
186 | energy_category_set = set() |
||
187 | # query energy categories in base period |
||
188 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
||
189 | " FROM tbl_shopfloor_input_category_hourly " |
||
190 | " WHERE shopfloor_id = %s " |
||
191 | " AND start_datetime_utc >= %s " |
||
192 | " AND start_datetime_utc < %s ", |
||
193 | (shopfloor['id'], base_start_datetime_utc, base_end_datetime_utc)) |
||
194 | rows_energy_categories = cursor_carbon.fetchall() |
||
195 | if rows_energy_categories is not None or len(rows_energy_categories) > 0: |
||
196 | for row_energy_category in rows_energy_categories: |
||
197 | energy_category_set.add(row_energy_category[0]) |
||
198 | |||
199 | # query energy categories in reporting period |
||
200 | cursor_carbon.execute(" SELECT DISTINCT(energy_category_id) " |
||
201 | " FROM tbl_shopfloor_input_category_hourly " |
||
202 | " WHERE shopfloor_id = %s " |
||
203 | " AND start_datetime_utc >= %s " |
||
204 | " AND start_datetime_utc < %s ", |
||
205 | (shopfloor['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
||
206 | rows_energy_categories = cursor_carbon.fetchall() |
||
207 | if rows_energy_categories is not None or len(rows_energy_categories) > 0: |
||
208 | for row_energy_category in rows_energy_categories: |
||
209 | energy_category_set.add(row_energy_category[0]) |
||
210 | |||
211 | # query all energy categories in base period and reporting period |
||
212 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
||
213 | " FROM tbl_energy_categories " |
||
214 | " ORDER BY id ", ) |
||
215 | rows_energy_categories = cursor_system.fetchall() |
||
216 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
||
217 | if cursor_system: |
||
218 | cursor_system.close() |
||
219 | if cnx_system: |
||
220 | cnx_system.close() |
||
221 | |||
222 | if cursor_carbon: |
||
223 | cursor_carbon.close() |
||
224 | if cnx_carbon: |
||
225 | cnx_carbon.close() |
||
226 | |||
227 | if cursor_historical: |
||
228 | cursor_historical.close() |
||
229 | if cnx_historical: |
||
230 | cnx_historical.close() |
||
231 | raise falcon.HTTPError(falcon.HTTP_404, |
||
232 | title='API.NOT_FOUND', |
||
233 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
||
234 | energy_category_dict = dict() |
||
235 | for row_energy_category in rows_energy_categories: |
||
236 | if row_energy_category[0] in energy_category_set: |
||
237 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
||
238 | "unit_of_measure": row_energy_category[2], |
||
239 | "kgce": row_energy_category[3], |
||
240 | "kgco2e": row_energy_category[4]} |
||
241 | |||
242 | ################################################################################################################ |
||
243 | # Step 4: query associated sensors |
||
244 | ################################################################################################################ |
||
245 | point_list = list() |
||
246 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
||
247 | " FROM tbl_shopfloors st, tbl_sensors se, tbl_shopfloors_sensors ss, " |
||
248 | " tbl_points p, tbl_sensors_points sp " |
||
249 | " WHERE st.id = %s AND st.id = ss.shopfloor_id AND ss.sensor_id = se.id " |
||
250 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
||
251 | " ORDER BY p.id ", (shopfloor['id'], )) |
||
252 | rows_points = cursor_system.fetchall() |
||
253 | if rows_points is not None and len(rows_points) > 0: |
||
254 | for row in rows_points: |
||
255 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
||
256 | |||
257 | ################################################################################################################ |
||
258 | # Step 5: query associated points |
||
259 | ################################################################################################################ |
||
260 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
||
261 | " FROM tbl_shopfloors s, tbl_shopfloors_points sp, tbl_points p " |
||
262 | " WHERE s.id = %s AND s.id = sp.shopfloor_id AND sp.point_id = p.id " |
||
263 | " ORDER BY p.id ", (shopfloor['id'], )) |
||
264 | rows_points = cursor_system.fetchall() |
||
265 | if rows_points is not None and len(rows_points) > 0: |
||
266 | for row in rows_points: |
||
267 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
||
268 | |||
269 | ################################################################################################################ |
||
270 | # Step 6: query base period energy carbon dioxide emissions |
||
271 | ################################################################################################################ |
||
272 | base = dict() |
||
273 | if energy_category_set is not None and len(energy_category_set) > 0: |
||
274 | for energy_category_id in energy_category_set: |
||
275 | base[energy_category_id] = dict() |
||
276 | base[energy_category_id]['timestamps'] = list() |
||
277 | base[energy_category_id]['values'] = list() |
||
278 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
||
279 | |||
280 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
||
281 | " FROM tbl_shopfloor_input_category_hourly " |
||
282 | " WHERE shopfloor_id = %s " |
||
283 | " AND energy_category_id = %s " |
||
284 | " AND start_datetime_utc >= %s " |
||
285 | " AND start_datetime_utc < %s " |
||
286 | " ORDER BY start_datetime_utc ", |
||
287 | (shopfloor['id'], |
||
288 | energy_category_id, |
||
289 | base_start_datetime_utc, |
||
290 | base_end_datetime_utc)) |
||
291 | rows_shopfloor_hourly = cursor_carbon.fetchall() |
||
292 | |||
293 | rows_shopfloor_periodically = utilities.aggregate_hourly_data_by_period(rows_shopfloor_hourly, |
||
294 | base_start_datetime_utc, |
||
295 | base_end_datetime_utc, |
||
296 | period_type) |
||
297 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
||
298 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
||
299 | timedelta(minutes=timezone_offset) |
||
300 | if period_type == 'hourly': |
||
301 | current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S') |
||
302 | elif period_type == 'daily': |
||
303 | current_datetime = current_datetime_local.strftime('%Y-%m-%d') |
||
304 | elif period_type == 'weekly': |
||
305 | current_datetime = current_datetime_local.strftime('%Y-%m-%d') |
||
306 | elif period_type == 'monthly': |
||
307 | current_datetime = current_datetime_local.strftime('%Y-%m') |
||
308 | elif period_type == 'yearly': |
||
309 | current_datetime = current_datetime_local.strftime('%Y') |
||
310 | |||
311 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
||
312 | else row_shopfloor_periodically[1] |
||
313 | base[energy_category_id]['timestamps'].append(current_datetime) |
||
314 | base[energy_category_id]['values'].append(actual_value) |
||
315 | base[energy_category_id]['subtotal'] += actual_value |
||
316 | |||
317 | ################################################################################################################ |
||
318 | # Step 7: query reporting period energy carbon dioxide emissions |
||
319 | ################################################################################################################ |
||
320 | reporting = dict() |
||
321 | if energy_category_set is not None and len(energy_category_set) > 0: |
||
322 | for energy_category_id in energy_category_set: |
||
323 | reporting[energy_category_id] = dict() |
||
324 | reporting[energy_category_id]['timestamps'] = list() |
||
325 | reporting[energy_category_id]['values'] = list() |
||
326 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
||
327 | reporting[energy_category_id]['toppeak'] = Decimal(0.0) |
||
328 | reporting[energy_category_id]['onpeak'] = Decimal(0.0) |
||
329 | reporting[energy_category_id]['midpeak'] = Decimal(0.0) |
||
330 | reporting[energy_category_id]['offpeak'] = Decimal(0.0) |
||
331 | |||
332 | cursor_carbon.execute(" SELECT start_datetime_utc, actual_value " |
||
333 | " FROM tbl_shopfloor_input_category_hourly " |
||
334 | " WHERE shopfloor_id = %s " |
||
335 | " AND energy_category_id = %s " |
||
336 | " AND start_datetime_utc >= %s " |
||
337 | " AND start_datetime_utc < %s " |
||
338 | " ORDER BY start_datetime_utc ", |
||
339 | (shopfloor['id'], |
||
340 | energy_category_id, |
||
341 | reporting_start_datetime_utc, |
||
342 | reporting_end_datetime_utc)) |
||
343 | rows_shopfloor_hourly = cursor_carbon.fetchall() |
||
344 | |||
345 | rows_shopfloor_periodically = utilities.aggregate_hourly_data_by_period(rows_shopfloor_hourly, |
||
346 | reporting_start_datetime_utc, |
||
347 | reporting_end_datetime_utc, |
||
348 | period_type) |
||
349 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
||
350 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
||
351 | timedelta(minutes=timezone_offset) |
||
352 | if period_type == 'hourly': |
||
353 | current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S') |
||
354 | elif period_type == 'daily': |
||
355 | current_datetime = current_datetime_local.strftime('%Y-%m-%d') |
||
356 | elif period_type == 'weekly': |
||
357 | current_datetime = current_datetime_local.strftime('%Y-%m-%d') |
||
358 | elif period_type == 'monthly': |
||
359 | current_datetime = current_datetime_local.strftime('%Y-%m') |
||
360 | elif period_type == 'yearly': |
||
361 | current_datetime = current_datetime_local.strftime('%Y') |
||
362 | |||
363 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
||
364 | else row_shopfloor_periodically[1] |
||
365 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
||
366 | reporting[energy_category_id]['values'].append(actual_value) |
||
367 | reporting[energy_category_id]['subtotal'] += actual_value |
||
368 | |||
369 | energy_category_tariff_dict = utilities.get_energy_category_peak_types(shopfloor['cost_center_id'], |
||
370 | energy_category_id, |
||
371 | reporting_start_datetime_utc, |
||
372 | reporting_end_datetime_utc) |
||
373 | for row in rows_shopfloor_hourly: |
||
374 | peak_type = energy_category_tariff_dict.get(row[0], None) |
||
375 | if peak_type == 'toppeak': |
||
376 | reporting[energy_category_id]['toppeak'] += row[1] |
||
377 | elif peak_type == 'onpeak': |
||
378 | reporting[energy_category_id]['onpeak'] += row[1] |
||
379 | elif peak_type == 'midpeak': |
||
380 | reporting[energy_category_id]['midpeak'] += row[1] |
||
381 | elif peak_type == 'offpeak': |
||
382 | reporting[energy_category_id]['offpeak'] += row[1] |
||
383 | |||
384 | ################################################################################################################ |
||
385 | # Step 8: query tariff data |
||
386 | ################################################################################################################ |
||
387 | parameters_data = dict() |
||
388 | parameters_data['names'] = list() |
||
389 | parameters_data['timestamps'] = list() |
||
390 | parameters_data['values'] = list() |
||
391 | if energy_category_set is not None and len(energy_category_set) > 0: |
||
392 | for energy_category_id in energy_category_set: |
||
393 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(shopfloor['cost_center_id'], |
||
394 | energy_category_id, |
||
395 | reporting_start_datetime_utc, |
||
396 | reporting_end_datetime_utc) |
||
397 | tariff_timestamp_list = list() |
||
398 | tariff_value_list = list() |
||
399 | for k, v in energy_category_tariff_dict.items(): |
||
400 | # convert k from utc to local |
||
401 | k = k + timedelta(minutes=timezone_offset) |
||
402 | tariff_timestamp_list.append(k.isoformat()[0:19][0:19]) |
||
403 | tariff_value_list.append(v) |
||
404 | |||
405 | parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name']) |
||
406 | parameters_data['timestamps'].append(tariff_timestamp_list) |
||
407 | parameters_data['values'].append(tariff_value_list) |
||
408 | |||
409 | ################################################################################################################ |
||
410 | # Step 9: query associated sensors and points data |
||
411 | ################################################################################################################ |
||
412 | for point in point_list: |
||
413 | point_values = [] |
||
414 | point_timestamps = [] |
||
415 | if point['object_type'] == 'ANALOG_VALUE': |
||
416 | query = (" SELECT utc_date_time, actual_value " |
||
417 | " FROM tbl_analog_value " |
||
418 | " WHERE point_id = %s " |
||
419 | " AND utc_date_time BETWEEN %s AND %s " |
||
420 | " ORDER BY utc_date_time ") |
||
421 | cursor_historical.execute(query, (point['id'], |
||
422 | reporting_start_datetime_utc, |
||
423 | reporting_end_datetime_utc)) |
||
424 | rows = cursor_historical.fetchall() |
||
425 | |||
426 | if rows is not None and len(rows) > 0: |
||
427 | for row in rows: |
||
428 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
||
429 | timedelta(minutes=timezone_offset) |
||
430 | current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S') |
||
431 | point_timestamps.append(current_datetime) |
||
432 | point_values.append(row[1]) |
||
433 | |||
434 | elif point['object_type'] == 'ENERGY_VALUE': |
||
435 | query = (" SELECT utc_date_time, actual_value " |
||
436 | " FROM tbl_energy_value " |
||
437 | " WHERE point_id = %s " |
||
438 | " AND utc_date_time BETWEEN %s AND %s " |
||
439 | " ORDER BY utc_date_time ") |
||
440 | cursor_historical.execute(query, (point['id'], |
||
441 | reporting_start_datetime_utc, |
||
442 | reporting_end_datetime_utc)) |
||
443 | rows = cursor_historical.fetchall() |
||
444 | |||
445 | if rows is not None and len(rows) > 0: |
||
446 | for row in rows: |
||
447 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
||
448 | timedelta(minutes=timezone_offset) |
||
449 | current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S') |
||
450 | point_timestamps.append(current_datetime) |
||
451 | point_values.append(row[1]) |
||
452 | elif point['object_type'] == 'DIGITAL_VALUE': |
||
453 | query = (" SELECT utc_date_time, actual_value " |
||
454 | " FROM tbl_digital_value " |
||
455 | " WHERE point_id = %s " |
||
456 | " AND utc_date_time BETWEEN %s AND %s " |
||
457 | " ORDER BY utc_date_time ") |
||
458 | cursor_historical.execute(query, (point['id'], |
||
459 | reporting_start_datetime_utc, |
||
460 | reporting_end_datetime_utc)) |
||
461 | rows = cursor_historical.fetchall() |
||
462 | |||
463 | if rows is not None and len(rows) > 0: |
||
464 | for row in rows: |
||
465 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
||
466 | timedelta(minutes=timezone_offset) |
||
467 | current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S') |
||
468 | point_timestamps.append(current_datetime) |
||
469 | point_values.append(row[1]) |
||
470 | |||
471 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
||
472 | parameters_data['timestamps'].append(point_timestamps) |
||
473 | parameters_data['values'].append(point_values) |
||
474 | |||
475 | ################################################################################################################ |
||
476 | # Step 10: construct the report |
||
477 | ################################################################################################################ |
||
478 | if cursor_system: |
||
479 | cursor_system.close() |
||
480 | if cnx_system: |
||
481 | cnx_system.close() |
||
482 | |||
483 | if cursor_carbon: |
||
484 | cursor_carbon.close() |
||
485 | if cnx_carbon: |
||
486 | cnx_carbon.close() |
||
487 | |||
488 | if cursor_historical: |
||
489 | cursor_historical.close() |
||
490 | if cnx_historical: |
||
491 | cnx_historical.close() |
||
492 | |||
493 | result = dict() |
||
494 | |||
495 | result['shopfloor'] = dict() |
||
496 | result['shopfloor']['name'] = shopfloor['name'] |
||
497 | result['shopfloor']['area'] = shopfloor['area'] |
||
498 | |||
499 | result['base_period'] = dict() |
||
500 | result['base_period']['names'] = list() |
||
501 | result['base_period']['units'] = list() |
||
502 | result['base_period']['timestamps'] = list() |
||
503 | result['base_period']['values'] = list() |
||
504 | result['base_period']['subtotals'] = list() |
||
505 | result['base_period']['total'] = Decimal(0.0) |
||
506 | if energy_category_set is not None and len(energy_category_set) > 0: |
||
507 | for energy_category_id in energy_category_set: |
||
508 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
||
509 | result['base_period']['units'].append('KG') |
||
510 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
||
511 | result['base_period']['values'].append(base[energy_category_id]['values']) |
||
512 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
||
513 | result['base_period']['total'] += base[energy_category_id]['subtotal'] |
||
514 | |||
515 | result['reporting_period'] = dict() |
||
516 | result['reporting_period']['names'] = list() |
||
517 | result['reporting_period']['energy_category_ids'] = list() |
||
518 | result['reporting_period']['units'] = list() |
||
519 | result['reporting_period']['timestamps'] = list() |
||
520 | result['reporting_period']['values'] = list() |
||
521 | result['reporting_period']['subtotals'] = list() |
||
522 | result['reporting_period']['subtotals_per_unit_area'] = list() |
||
523 | result['reporting_period']['toppeaks'] = list() |
||
524 | result['reporting_period']['onpeaks'] = list() |
||
525 | result['reporting_period']['midpeaks'] = list() |
||
526 | result['reporting_period']['offpeaks'] = list() |
||
527 | result['reporting_period']['increment_rates'] = list() |
||
528 | result['reporting_period']['total'] = Decimal(0.0) |
||
529 | result['reporting_period']['total_per_unit_area'] = Decimal(0.0) |
||
530 | result['reporting_period']['total_increment_rate'] = Decimal(0.0) |
||
531 | result['reporting_period']['total_unit'] = 'KG' |
||
532 | |||
533 | if energy_category_set is not None and len(energy_category_set) > 0: |
||
534 | for energy_category_id in energy_category_set: |
||
535 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
||
536 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
||
537 | result['reporting_period']['units'].append('KG') |
||
538 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
||
539 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
||
540 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
||
541 | result['reporting_period']['subtotals_per_unit_area'].append( |
||
542 | reporting[energy_category_id]['subtotal'] / shopfloor['area'] if shopfloor['area'] > 0.0 else None) |
||
543 | result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak']) |
||
544 | result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak']) |
||
545 | result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak']) |
||
546 | result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak']) |
||
547 | result['reporting_period']['increment_rates'].append( |
||
548 | (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) / |
||
549 | base[energy_category_id]['subtotal'] |
||
550 | if base[energy_category_id]['subtotal'] > 0.0 else None) |
||
551 | result['reporting_period']['total'] += reporting[energy_category_id]['subtotal'] |
||
552 | |||
553 | result['reporting_period']['total_per_unit_area'] = \ |
||
554 | result['reporting_period']['total'] / shopfloor['area'] if shopfloor['area'] > 0.0 else None |
||
555 | |||
556 | result['reporting_period']['total_increment_rate'] = \ |
||
557 | (result['reporting_period']['total'] - result['base_period']['total']) / \ |
||
558 | result['base_period']['total'] \ |
||
559 | if result['base_period']['total'] > Decimal(0.0) else None |
||
560 | |||
561 | result['parameters'] = { |
||
562 | "names": parameters_data['names'], |
||
563 | "timestamps": parameters_data['timestamps'], |
||
564 | "values": parameters_data['values'] |
||
565 | } |
||
566 | # export result to Excel file and then encode the file to base64 string |
||
567 | result['excel_bytes_base64'] = excelexporters.shopfloorcarbon.export(result, |
||
568 | shopfloor['name'], |
||
569 | reporting_start_datetime_local, |
||
570 | reporting_end_datetime_local, |
||
571 | period_type) |
||
572 | resp.text = json.dumps(result) |
||
573 |