| Metric      | Value |
|-------------|-------|
| Conditions  | 104   |
| Total Lines | 505   |
| Code Lines  | 375   |
| Lines       | 505   |
| Ratio       | 100 % |
| Changes     | 0     |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include Extract Method. If many parameters or temporary variables are present, Replace Temp with Query, Introduce Parameter Object, or Preserve Whole Object can also help.
Complex classes like reports.storecost.Reporting.on_get() often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to find such a component is to look for fields/methods that share the same prefixes, or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
1 | import falcon |
||
@staticmethod
def on_get(req, resp):
    """Generate the store energy cost report.

    Query string parameters (local datetimes use '%Y-%m-%dT%H:%M:%S'):
        storeid                      -- positive integer store id (required)
        periodtype                   -- 'hourly' | 'daily' | 'monthly' | 'yearly' (required)
        baseperiodstartdatetime      -- base period start, optional
        baseperiodenddatetime        -- base period end, optional
        reportingperiodstartdatetime -- reporting period start (required)
        reportingperiodenddatetime   -- reporting period end (required)

    Raises:
        falcon.HTTPError: 400 on invalid parameters; 404 when the store or
            the energy categories cannot be found.

    Writes a JSON body containing base/reporting period costs per energy
    category, tariff and sensor/point parameter series, and a base64
    encoded Excel export of the report.
    """
    store_id = req.params.get('storeid')
    period_type = req.params.get('periodtype')
    base_start_datetime_local = req.params.get('baseperiodstartdatetime')
    base_end_datetime_local = req.params.get('baseperiodenddatetime')
    reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
    reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')

    ################################################################################################################
    # Step 1: valid parameters
    ################################################################################################################
    if store_id is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_STORE_ID')
    else:
        store_id = str.strip(store_id)
        if not store_id.isdigit() or int(store_id) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_STORE_ID')

    if period_type is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')
    else:
        period_type = str.strip(period_type)
        if period_type not in ['hourly', 'daily', 'monthly', 'yearly']:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')

    # config.utc_offset looks like '+08:00'; convert it to signed minutes.
    timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
    if config.utc_offset[0] == '-':
        timezone_offset = -timezone_offset

    base_start_datetime_utc = None
    if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
        base_start_datetime_local = str.strip(base_start_datetime_local)
        try:
            base_start_datetime_utc = datetime.strptime(base_start_datetime_local,
                                                        '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_BASE_PERIOD_START_DATETIME")

    base_end_datetime_utc = None
    if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
        base_end_datetime_local = str.strip(base_end_datetime_local)
        try:
            base_end_datetime_utc = datetime.strptime(base_end_datetime_local,
                                                      '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_BASE_PERIOD_END_DATETIME")

    if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
            base_start_datetime_utc >= base_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_BASE_PERIOD_END_DATETIME')

    if reporting_start_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
    else:
        reporting_start_datetime_local = str.strip(reporting_start_datetime_local)
        try:
            reporting_start_datetime_utc = datetime.strptime(reporting_start_datetime_local,
                                                             '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")

    if reporting_end_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
    else:
        reporting_end_datetime_local = str.strip(reporting_end_datetime_local)
        try:
            reporting_end_datetime_utc = datetime.strptime(reporting_end_datetime_local,
                                                           '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")

    if reporting_start_datetime_utc >= reporting_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

    ################################################################################################################
    # Step 2: query the store
    ################################################################################################################
    cnx_system = mysql.connector.connect(**config.myems_system_db)
    cursor_system = cnx_system.cursor()

    cnx_billing = mysql.connector.connect(**config.myems_billing_db)
    cursor_billing = cnx_billing.cursor()

    cnx_historical = mysql.connector.connect(**config.myems_historical_db)
    cursor_historical = cnx_historical.cursor()

    def _close_connections():
        # Release every cursor and connection opened above; shared by the
        # error paths and the success path so nothing leaks.
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.disconnect()

        if cursor_billing:
            cursor_billing.close()
        if cnx_billing:
            cnx_billing.disconnect()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.disconnect()

    cursor_system.execute(" SELECT id, name, area, cost_center_id "
                          " FROM tbl_stores "
                          " WHERE id = %s ", (store_id,))
    row_store = cursor_system.fetchone()
    if row_store is None:
        _close_connections()
        raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND', description='API.STORE_NOT_FOUND')

    store = dict()
    store['id'] = row_store[0]
    store['name'] = row_store[1]
    store['area'] = row_store[2]
    store['cost_center_id'] = row_store[3]

    ################################################################################################################
    # Step 3: query energy categories
    ################################################################################################################
    energy_category_set = set()
    # query energy categories in base period
    cursor_billing.execute(" SELECT DISTINCT(energy_category_id) "
                           " FROM tbl_store_input_category_hourly "
                           " WHERE store_id = %s "
                           " AND start_datetime_utc >= %s "
                           " AND start_datetime_utc < %s ",
                           (store['id'], base_start_datetime_utc, base_end_datetime_utc))
    rows_energy_categories = cursor_billing.fetchall()
    # NOTE: was `is not None or len(...) > 0`, which would call len(None)
    if rows_energy_categories is not None and len(rows_energy_categories) > 0:
        for row_energy_category in rows_energy_categories:
            energy_category_set.add(row_energy_category[0])

    # query energy categories in reporting period
    cursor_billing.execute(" SELECT DISTINCT(energy_category_id) "
                           " FROM tbl_store_input_category_hourly "
                           " WHERE store_id = %s "
                           " AND start_datetime_utc >= %s "
                           " AND start_datetime_utc < %s ",
                           (store['id'], reporting_start_datetime_utc, reporting_end_datetime_utc))
    rows_energy_categories = cursor_billing.fetchall()
    if rows_energy_categories is not None and len(rows_energy_categories) > 0:
        for row_energy_category in rows_energy_categories:
            energy_category_set.add(row_energy_category[0])

    # query all energy categories in base period and reporting period
    cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                          " FROM tbl_energy_categories "
                          " ORDER BY id ", )
    rows_energy_categories = cursor_system.fetchall()
    if rows_energy_categories is None or len(rows_energy_categories) == 0:
        _close_connections()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.ENERGY_CATEGORY_NOT_FOUND')
    energy_category_dict = dict()
    for row_energy_category in rows_energy_categories:
        if row_energy_category[0] in energy_category_set:
            energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                            "unit_of_measure": row_energy_category[2],
                                                            "kgce": row_energy_category[3],
                                                            "kgco2e": row_energy_category[4]}

    ################################################################################################################
    # Step 4: query associated sensors
    ################################################################################################################
    point_list = list()
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_stores st, tbl_sensors se, tbl_stores_sensors ss, "
                          " tbl_points p, tbl_sensors_points sp "
                          " WHERE st.id = %s AND st.id = ss.store_id AND ss.sensor_id = se.id "
                          " AND se.id = sp.sensor_id AND sp.point_id = p.id "
                          " ORDER BY p.id ", (store['id'], ))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

    ################################################################################################################
    # Step 5: query associated points
    ################################################################################################################
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_stores s, tbl_stores_points sp, tbl_points p "
                          " WHERE s.id = %s AND s.id = sp.store_id AND sp.point_id = p.id "
                          " ORDER BY p.id ", (store['id'], ))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

    ################################################################################################################
    # Step 6: query base period energy cost
    ################################################################################################################
    base = dict()
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            base[energy_category_id] = dict()
            base[energy_category_id]['timestamps'] = list()
            base[energy_category_id]['values'] = list()
            base[energy_category_id]['subtotal'] = Decimal(0.0)

            cursor_billing.execute(" SELECT start_datetime_utc, actual_value "
                                   " FROM tbl_store_input_category_hourly "
                                   " WHERE store_id = %s "
                                   " AND energy_category_id = %s "
                                   " AND start_datetime_utc >= %s "
                                   " AND start_datetime_utc < %s "
                                   " ORDER BY start_datetime_utc ",
                                   (store['id'],
                                    energy_category_id,
                                    base_start_datetime_utc,
                                    base_end_datetime_utc))
            rows_store_hourly = cursor_billing.fetchall()

            rows_store_periodically = utilities.aggregate_hourly_data_by_period(rows_store_hourly,
                                                                                base_start_datetime_utc,
                                                                                base_end_datetime_utc,
                                                                                period_type)
            for row_store_periodically in rows_store_periodically:
                # convert the aggregated UTC timestamp back to local time
                current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \
                    timedelta(minutes=timezone_offset)
                # period_type was validated in Step 1, so one branch always matches
                if period_type == 'hourly':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                elif period_type == 'daily':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                elif period_type == 'monthly':
                    current_datetime = current_datetime_local.strftime('%Y-%m')
                elif period_type == 'yearly':
                    current_datetime = current_datetime_local.strftime('%Y')

                actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1]
                base[energy_category_id]['timestamps'].append(current_datetime)
                base[energy_category_id]['values'].append(actual_value)
                base[energy_category_id]['subtotal'] += actual_value

    ################################################################################################################
    # Step 7: query reporting period energy cost
    ################################################################################################################
    reporting = dict()
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            reporting[energy_category_id] = dict()
            reporting[energy_category_id]['timestamps'] = list()
            reporting[energy_category_id]['values'] = list()
            reporting[energy_category_id]['subtotal'] = Decimal(0.0)
            reporting[energy_category_id]['toppeak'] = Decimal(0.0)
            reporting[energy_category_id]['onpeak'] = Decimal(0.0)
            reporting[energy_category_id]['midpeak'] = Decimal(0.0)
            reporting[energy_category_id]['offpeak'] = Decimal(0.0)

            cursor_billing.execute(" SELECT start_datetime_utc, actual_value "
                                   " FROM tbl_store_input_category_hourly "
                                   " WHERE store_id = %s "
                                   " AND energy_category_id = %s "
                                   " AND start_datetime_utc >= %s "
                                   " AND start_datetime_utc < %s "
                                   " ORDER BY start_datetime_utc ",
                                   (store['id'],
                                    energy_category_id,
                                    reporting_start_datetime_utc,
                                    reporting_end_datetime_utc))
            rows_store_hourly = cursor_billing.fetchall()

            rows_store_periodically = utilities.aggregate_hourly_data_by_period(rows_store_hourly,
                                                                                reporting_start_datetime_utc,
                                                                                reporting_end_datetime_utc,
                                                                                period_type)
            for row_store_periodically in rows_store_periodically:
                current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \
                    timedelta(minutes=timezone_offset)
                if period_type == 'hourly':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                elif period_type == 'daily':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                elif period_type == 'monthly':
                    current_datetime = current_datetime_local.strftime('%Y-%m')
                elif period_type == 'yearly':
                    current_datetime = current_datetime_local.strftime('%Y')

                actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1]
                reporting[energy_category_id]['timestamps'].append(current_datetime)
                reporting[energy_category_id]['values'].append(actual_value)
                reporting[energy_category_id]['subtotal'] += actual_value

            # break the hourly rows down into top/on/mid/off peak buckets
            energy_category_tariff_dict = utilities.get_energy_category_peak_types(store['cost_center_id'],
                                                                                   energy_category_id,
                                                                                   reporting_start_datetime_utc,
                                                                                   reporting_end_datetime_utc)
            for row in rows_store_hourly:
                peak_type = energy_category_tariff_dict.get(row[0], None)
                if peak_type == 'toppeak':
                    reporting[energy_category_id]['toppeak'] += row[1]
                elif peak_type == 'onpeak':
                    reporting[energy_category_id]['onpeak'] += row[1]
                elif peak_type == 'midpeak':
                    reporting[energy_category_id]['midpeak'] += row[1]
                elif peak_type == 'offpeak':
                    reporting[energy_category_id]['offpeak'] += row[1]

    ################################################################################################################
    # Step 8: query tariff data
    ################################################################################################################
    parameters_data = dict()
    parameters_data['names'] = list()
    parameters_data['timestamps'] = list()
    parameters_data['values'] = list()
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            energy_category_tariff_dict = utilities.get_energy_category_tariffs(store['cost_center_id'],
                                                                                energy_category_id,
                                                                                reporting_start_datetime_utc,
                                                                                reporting_end_datetime_utc)
            tariff_timestamp_list = list()
            tariff_value_list = list()
            for k, v in energy_category_tariff_dict.items():
                # convert k from utc to local
                k = k + timedelta(minutes=timezone_offset)
                # keep only 'YYYY-MM-DDTHH:MM:SS' (drop any sub-second part)
                tariff_timestamp_list.append(k.isoformat()[0:19])
                tariff_value_list.append(v)

            parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name'])
            parameters_data['timestamps'].append(tariff_timestamp_list)
            parameters_data['values'].append(tariff_value_list)

    ################################################################################################################
    # Step 9: query associated sensors and points data
    ################################################################################################################
    for point in point_list:
        point_values = []
        point_timestamps = []
        if point['object_type'] == 'ANALOG_VALUE':
            query = (" SELECT utc_date_time, actual_value "
                     " FROM tbl_analog_value "
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s "
                     " ORDER BY utc_date_time ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()

            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    point_timestamps.append(current_datetime)
                    point_values.append(row[1])

        elif point['object_type'] == 'ENERGY_VALUE':
            query = (" SELECT utc_date_time, actual_value "
                     " FROM tbl_energy_value "
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s "
                     " ORDER BY utc_date_time ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()

            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    point_timestamps.append(current_datetime)
                    point_values.append(row[1])
        elif point['object_type'] == 'DIGITAL_VALUE':
            query = (" SELECT utc_date_time, actual_value "
                     " FROM tbl_digital_value "
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()

            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    point_timestamps.append(current_datetime)
                    point_values.append(row[1])

        parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
        parameters_data['timestamps'].append(point_timestamps)
        parameters_data['values'].append(point_values)

    ################################################################################################################
    # Step 10: construct the report
    ################################################################################################################
    # all database work is done; release every connection (the original
    # code leaked the historical connection here)
    _close_connections()

    result = dict()

    result['store'] = dict()
    result['store']['name'] = store['name']
    result['store']['area'] = store['area']

    result['base_period'] = dict()
    result['base_period']['names'] = list()
    result['base_period']['units'] = list()
    result['base_period']['timestamps'] = list()
    result['base_period']['values'] = list()
    result['base_period']['subtotals'] = list()
    result['base_period']['total'] = Decimal(0.0)
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            result['base_period']['names'].append(energy_category_dict[energy_category_id]['name'])
            result['base_period']['units'].append(config.currency_unit)
            result['base_period']['timestamps'].append(base[energy_category_id]['timestamps'])
            result['base_period']['values'].append(base[energy_category_id]['values'])
            result['base_period']['subtotals'].append(base[energy_category_id]['subtotal'])
            result['base_period']['total'] += base[energy_category_id]['subtotal']

    result['reporting_period'] = dict()
    result['reporting_period']['names'] = list()
    result['reporting_period']['energy_category_ids'] = list()
    result['reporting_period']['units'] = list()
    result['reporting_period']['timestamps'] = list()
    result['reporting_period']['values'] = list()
    result['reporting_period']['subtotals'] = list()
    result['reporting_period']['subtotals_per_unit_area'] = list()
    result['reporting_period']['toppeaks'] = list()
    result['reporting_period']['onpeaks'] = list()
    result['reporting_period']['midpeaks'] = list()
    result['reporting_period']['offpeaks'] = list()
    result['reporting_period']['increment_rates'] = list()
    result['reporting_period']['total'] = Decimal(0.0)
    result['reporting_period']['total_per_unit_area'] = Decimal(0.0)
    result['reporting_period']['total_increment_rate'] = Decimal(0.0)
    result['reporting_period']['total_unit'] = config.currency_unit

    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name'])
            result['reporting_period']['energy_category_ids'].append(energy_category_id)
            result['reporting_period']['units'].append(config.currency_unit)
            result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps'])
            result['reporting_period']['values'].append(reporting[energy_category_id]['values'])
            result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal'])
            result['reporting_period']['subtotals_per_unit_area'].append(
                reporting[energy_category_id]['subtotal'] / store['area'] if store['area'] > 0.0 else None)
            result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak'])
            result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak'])
            result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak'])
            result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak'])
            # None when there is no base-period cost to compare against
            result['reporting_period']['increment_rates'].append(
                (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) /
                base[energy_category_id]['subtotal']
                if base[energy_category_id]['subtotal'] > 0.0 else None)
            result['reporting_period']['total'] += reporting[energy_category_id]['subtotal']

    result['reporting_period']['total_per_unit_area'] = \
        result['reporting_period']['total'] / store['area'] if store['area'] > 0.0 else None

    result['reporting_period']['total_increment_rate'] = \
        (result['reporting_period']['total'] - result['base_period']['total']) / \
        result['base_period']['total'] \
        if result['base_period']['total'] > Decimal(0.0) else None

    result['parameters'] = {
        "names": parameters_data['names'],
        "timestamps": parameters_data['timestamps'],
        "values": parameters_data['values']
    }
    # export result to Excel file and then encode the file to base64 string
    result['excel_bytes_base64'] = excelexporters.storecost.export(result,
                                                                   store['name'],
                                                                   reporting_start_datetime_local,
                                                                   reporting_end_datetime_local,
                                                                   period_type)
    resp.body = json.dumps(result)