| Total Complexity | 109 |
| Total Lines | 621 |
| Duplicated Lines | 98.07 % |
| Changes | 0 | ||
Duplicate code is one of the most pungent code smells. A commonly used rule of thumb is to restructure code once it is duplicated in three or more places.
Common duplication problems, and corresponding solutions are:
Complex classes like reports.storestatistics often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
| 1 | import falcon |
||
| 2 | import simplejson as json |
||
| 3 | import mysql.connector |
||
| 4 | import config |
||
| 5 | from datetime import datetime, timedelta, timezone |
||
| 6 | |||
| 7 | import excelexporters.storestatistics |
||
| 8 | from core import utilities |
||
| 9 | from decimal import Decimal |
||
| 10 | |||
| 11 | |||
class Reporting:
    """Falcon resource that reports energy-input statistics for a store.

    GET query parameters:
        storeid                      -- positive integer id of the store
        periodtype                   -- 'hourly' | 'daily' | 'monthly' | 'yearly'
        baseperiodstartdatetime      -- optional local 'YYYY-MM-DDTHH:MM:SS'
        baseperiodenddatetime        -- optional local 'YYYY-MM-DDTHH:MM:SS'
        reportingperiodstartdatetime -- required local 'YYYY-MM-DDTHH:MM:SS'
        reportingperiodenddatetime   -- required local 'YYYY-MM-DDTHH:MM:SS'

    PROCEDURES
    Step 1: valid parameters
    Step 2: query the store
    Step 3: query energy categories
    Step 4: query associated sensors
    Step 5: query associated points
    Step 6: query base period energy input
    Step 7: query reporting period energy input
    Step 8: query tariff data
    Step 9: query associated sensors and points data
    Step 10: construct the report
    """

    @staticmethod
    def __init__():
        """The resource is stateless; nothing to initialize."""
        pass

    @staticmethod
    def on_options(req, resp):
        # CORS pre-flight: respond 200 with no body.
        resp.status = falcon.HTTP_200

    @staticmethod
    def _close_all(cursor_system, cnx_system, cursor_energy, cnx_energy,
                   cursor_historical, cnx_historical):
        """Close all cursors and disconnect all connections (None-safe).

        Fixes two defects of the previous inline cleanup code:
        * the historical cursor was 'disconnect()'-ed and the connection
          'close()'-d (swapped calls; a cursor has no disconnect()),
        * the historical connection was never released on the success path.
        """
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.disconnect()

        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.disconnect()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.disconnect()

    @staticmethod
    def _parse_local_datetime(datetime_local, timezone_offset, error_description):
        """Parse a local 'YYYY-MM-DDTHH:MM:SS' string and return it as UTC.

        Raises falcon.HTTPError(400) carrying *error_description* when the
        string does not match the expected format.
        """
        try:
            return datetime.strptime(datetime_local,
                                     '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description=error_description)

    @staticmethod
    def _format_local_datetime(datetime_utc, period_type, timezone_offset):
        """Convert a naive-UTC timestamp to local time and format it with the
        precision appropriate for *period_type* (validated by the caller)."""
        datetime_local = datetime_utc.replace(tzinfo=timezone.utc) + \
            timedelta(minutes=timezone_offset)
        period_formats = {'hourly': '%Y-%m-%dT%H:%M:%S',
                          'daily': '%Y-%m-%d',
                          'monthly': '%Y-%m',
                          'yearly': '%Y'}
        return datetime_local.strftime(period_formats[period_type])

    @staticmethod
    def _per_unit_area(value, area):
        """Return value / area, or None when either operand is unusable."""
        if value is not None and area is not None and area > Decimal(0.0):
            return value / area
        return None

    @staticmethod
    def _increment_rate(reporting_value, base_value):
        """Return (reporting - base) / base, or None when not computable.

        Also guards *reporting_value* against None: the previous inline code
        raised TypeError when the reporting statistic was None while the
        base statistic was positive.
        """
        if reporting_value is not None and base_value is not None and \
                base_value > Decimal(0.0):
            return (reporting_value - base_value) / base_value
        return None

    @staticmethod
    def _query_periodic_statistics(cursor_energy, store_id, energy_category_set,
                                   start_datetime_utc, end_datetime_utc,
                                   period_type, timezone_offset):
        """Aggregate hourly energy input of one period into statistics.

        Shared by the base and reporting periods (the two former copies were
        line-for-line duplicates).  Returns a dict keyed by energy category
        id; each entry carries 'timestamps', 'values', 'subtotal', 'mean',
        'median', 'minimum', 'maximum', 'stdev' and 'variance'.
        """
        statistics = dict()
        if energy_category_set is None or len(energy_category_set) == 0:
            return statistics

        for energy_category_id in energy_category_set:
            entry = {'timestamps': list(),
                     'values': list(),
                     'subtotal': Decimal(0.0),
                     'mean': None,
                     'median': None,
                     'minimum': None,
                     'maximum': None,
                     'stdev': None,
                     'variance': None}
            statistics[energy_category_id] = entry

            cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                  " FROM tbl_store_input_category_hourly "
                                  " WHERE store_id = %s "
                                  " AND energy_category_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s "
                                  " ORDER BY start_datetime_utc ",
                                  (store_id,
                                   energy_category_id,
                                   start_datetime_utc,
                                   end_datetime_utc))
            rows_store_hourly = cursor_energy.fetchall()

            rows_store_periodically, \
                entry['mean'], \
                entry['median'], \
                entry['minimum'], \
                entry['maximum'], \
                entry['stdev'], \
                entry['variance'] = \
                utilities.statistics_hourly_data_by_period(rows_store_hourly,
                                                           start_datetime_utc,
                                                           end_datetime_utc,
                                                           period_type)

            for row_store_periodically in rows_store_periodically:
                current_datetime = \
                    Reporting._format_local_datetime(row_store_periodically[0],
                                                     period_type,
                                                     timezone_offset)
                # missing readings count as zero in the subtotal
                actual_value = Decimal(0.0) if row_store_periodically[1] is None \
                    else row_store_periodically[1]
                entry['timestamps'].append(current_datetime)
                entry['values'].append(actual_value)
                entry['subtotal'] += actual_value

        return statistics

    @staticmethod
    def _query_point_values(cursor_historical, point,
                            start_datetime_utc, end_datetime_utc, timezone_offset):
        """Fetch raw historical values of one point over the reporting period.

        Returns (timestamps, values); both empty for unknown object types.
        The three former copies of this query differed only in table name.
        """
        table_names = {'ANALOG_VALUE': 'tbl_analog_value',
                       'ENERGY_VALUE': 'tbl_energy_value',
                       'DIGITAL_VALUE': 'tbl_digital_value'}
        point_timestamps = []
        point_values = []
        table_name = table_names.get(point['object_type'])
        if table_name is None:
            return point_timestamps, point_values

        # NOTE: the digital-value query previously had no ORDER BY; it is
        # added here for consistency with the analog and energy queries.
        query = (" SELECT utc_date_time, actual_value "
                 " FROM " + table_name + " "
                 " WHERE point_id = %s "
                 " AND utc_date_time BETWEEN %s AND %s "
                 " ORDER BY utc_date_time ")
        cursor_historical.execute(query, (point['id'],
                                          start_datetime_utc,
                                          end_datetime_utc))
        rows = cursor_historical.fetchall()

        if rows is not None and len(rows) > 0:
            for row in rows:
                current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                    timedelta(minutes=timezone_offset)
                point_timestamps.append(current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S'))
                point_values.append(row[1])

        return point_timestamps, point_values

    @staticmethod
    def on_get(req, resp):
        print(req.params)
        store_id = req.params.get('storeid')
        period_type = req.params.get('periodtype')
        base_start_datetime_local = req.params.get('baseperiodstartdatetime')
        base_end_datetime_local = req.params.get('baseperiodenddatetime')
        reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')

        ################################################################################################################
        # Step 1: valid parameters
        ################################################################################################################
        if store_id is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_STORE_ID')
        store_id = str.strip(store_id)
        if not store_id.isdigit() or int(store_id) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_STORE_ID')

        if period_type is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')
        period_type = str.strip(period_type)
        if period_type not in ['hourly', 'daily', 'monthly', 'yearly']:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST', description='API.INVALID_PERIOD_TYPE')

        # config.utc_offset is of the form '+HH:MM' / '-HH:MM'; convert it to
        # signed minutes once and reuse everywhere.
        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        # The base period is optional; blank values leave its bounds as None.
        base_start_datetime_utc = None
        if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
            base_start_datetime_utc = \
                Reporting._parse_local_datetime(str.strip(base_start_datetime_local), timezone_offset,
                                                "API.INVALID_BASE_PERIOD_START_DATETIME")

        base_end_datetime_utc = None
        if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
            base_end_datetime_utc = \
                Reporting._parse_local_datetime(str.strip(base_end_datetime_local), timezone_offset,
                                                "API.INVALID_BASE_PERIOD_END_DATETIME")

        if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
                base_start_datetime_utc >= base_end_datetime_utc:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_BASE_PERIOD_END_DATETIME')

        # The reporting period is mandatory.
        if reporting_start_datetime_local is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        reporting_start_datetime_local = str.strip(reporting_start_datetime_local)
        reporting_start_datetime_utc = \
            Reporting._parse_local_datetime(reporting_start_datetime_local, timezone_offset,
                                            "API.INVALID_REPORTING_PERIOD_START_DATETIME")

        if reporting_end_datetime_local is None:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        reporting_end_datetime_local = str.strip(reporting_end_datetime_local)
        reporting_end_datetime_utc = \
            Reporting._parse_local_datetime(reporting_end_datetime_local, timezone_offset,
                                            "API.INVALID_REPORTING_PERIOD_END_DATETIME")

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        ################################################################################################################
        # Step 2: query the store
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_energy = mysql.connector.connect(**config.myems_energy_db)
        cursor_energy = cnx_energy.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        cursor_system.execute(" SELECT id, name, area, cost_center_id "
                              " FROM tbl_stores "
                              " WHERE id = %s ", (store_id,))
        row_store = cursor_system.fetchone()
        if row_store is None:
            Reporting._close_all(cursor_system, cnx_system, cursor_energy, cnx_energy,
                                 cursor_historical, cnx_historical)
            raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND', description='API.STORE_NOT_FOUND')

        store = {'id': row_store[0],
                 'name': row_store[1],
                 'area': row_store[2],
                 'cost_center_id': row_store[3]}

        ################################################################################################################
        # Step 3: query energy categories
        ################################################################################################################
        energy_category_set = set()
        # energy categories that actually have data in the base period or the
        # reporting period (same DISTINCT query, two time windows)
        for period_start, period_end in ((base_start_datetime_utc, base_end_datetime_utc),
                                         (reporting_start_datetime_utc, reporting_end_datetime_utc)):
            cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                                  " FROM tbl_store_input_category_hourly "
                                  " WHERE store_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s ",
                                  (store['id'], period_start, period_end))
            # fetchall() returns a (possibly empty) list, so iterate directly;
            # the previous `is not None or len(...) > 0` guard used `or` where
            # `and` was intended and was redundant anyway.
            for row_energy_category in cursor_energy.fetchall():
                energy_category_set.add(row_energy_category[0])

        # all defined energy categories; filtered below by energy_category_set
        cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                              " FROM tbl_energy_categories "
                              " ORDER BY id ", )
        rows_energy_categories = cursor_system.fetchall()
        if rows_energy_categories is None or len(rows_energy_categories) == 0:
            Reporting._close_all(cursor_system, cnx_system, cursor_energy, cnx_energy,
                                 cursor_historical, cnx_historical)
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.ENERGY_CATEGORY_NOT_FOUND')
        energy_category_dict = dict()
        for row_energy_category in rows_energy_categories:
            if row_energy_category[0] in energy_category_set:
                energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                                "unit_of_measure": row_energy_category[2],
                                                                "kgce": row_energy_category[3],
                                                                "kgco2e": row_energy_category[4]}

        ################################################################################################################
        # Step 4: query associated sensors
        ################################################################################################################
        point_list = list()
        cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                              " FROM tbl_stores st, tbl_sensors se, tbl_stores_sensors ss, "
                              " tbl_points p, tbl_sensors_points sp "
                              " WHERE st.id = %s AND st.id = ss.store_id AND ss.sensor_id = se.id "
                              " AND se.id = sp.sensor_id AND sp.point_id = p.id "
                              " ORDER BY p.id ", (store['id'],))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 5: query associated points
        ################################################################################################################
        cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                              " FROM tbl_stores s, tbl_stores_points sp, tbl_points p "
                              " WHERE s.id = %s AND s.id = sp.store_id AND sp.point_id = p.id "
                              " ORDER BY p.id ", (store['id'],))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 6: query base period energy input
        ################################################################################################################
        base = Reporting._query_periodic_statistics(cursor_energy, store['id'], energy_category_set,
                                                    base_start_datetime_utc, base_end_datetime_utc,
                                                    period_type, timezone_offset)

        ################################################################################################################
        # Step 7: query reporting period energy input
        ################################################################################################################
        reporting = Reporting._query_periodic_statistics(cursor_energy, store['id'], energy_category_set,
                                                         reporting_start_datetime_utc, reporting_end_datetime_utc,
                                                         period_type, timezone_offset)

        ################################################################################################################
        # Step 8: query tariff data
        ################################################################################################################
        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()
        for energy_category_id in energy_category_set:
            energy_category_tariff_dict = utilities.get_energy_category_tariffs(store['cost_center_id'],
                                                                                energy_category_id,
                                                                                reporting_start_datetime_utc,
                                                                                reporting_end_datetime_utc)
            tariff_timestamp_list = list()
            tariff_value_list = list()
            for k, v in energy_category_tariff_dict.items():
                # convert k from utc to local
                k = k + timedelta(minutes=timezone_offset)
                # keep only 'YYYY-MM-DDTHH:MM:SS' (drop any offset/microseconds)
                tariff_timestamp_list.append(k.isoformat()[0:19])
                tariff_value_list.append(v)

            parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name'])
            parameters_data['timestamps'].append(tariff_timestamp_list)
            parameters_data['values'].append(tariff_value_list)

        ################################################################################################################
        # Step 9: query associated sensors and points data
        ################################################################################################################
        for point in point_list:
            point_timestamps, point_values = \
                Reporting._query_point_values(cursor_historical, point,
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc,
                                              timezone_offset)
            parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
            parameters_data['timestamps'].append(point_timestamps)
            parameters_data['values'].append(point_values)

        ################################################################################################################
        # Step 10: construct the report
        ################################################################################################################
        # Release every database resource, including the historical
        # cursor/connection which the previous version leaked here.
        Reporting._close_all(cursor_system, cnx_system, cursor_energy, cnx_energy,
                             cursor_historical, cnx_historical)

        result = dict()

        result['store'] = {'name': store['name'],
                           'area': store['area']}

        result['base_period'] = {'names': list(),
                                 'units': list(),
                                 'timestamps': list(),
                                 'values': list(),
                                 'subtotals': list(),
                                 'means': list(),
                                 'medians': list(),
                                 'minimums': list(),
                                 'maximums': list(),
                                 'stdevs': list(),
                                 'variances': list()}

        for energy_category_id in energy_category_set:
            result['base_period']['names'].append(energy_category_dict[energy_category_id]['name'])
            result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
            result['base_period']['timestamps'].append(base[energy_category_id]['timestamps'])
            result['base_period']['values'].append(base[energy_category_id]['values'])
            result['base_period']['subtotals'].append(base[energy_category_id]['subtotal'])
            result['base_period']['means'].append(base[energy_category_id]['mean'])
            result['base_period']['medians'].append(base[energy_category_id]['median'])
            result['base_period']['minimums'].append(base[energy_category_id]['minimum'])
            result['base_period']['maximums'].append(base[energy_category_id]['maximum'])
            result['base_period']['stdevs'].append(base[energy_category_id]['stdev'])
            result['base_period']['variances'].append(base[energy_category_id]['variance'])

        result['reporting_period'] = dict()
        result['reporting_period']['names'] = list()
        result['reporting_period']['energy_category_ids'] = list()
        result['reporting_period']['units'] = list()
        result['reporting_period']['timestamps'] = list()
        result['reporting_period']['values'] = list()
        result['reporting_period']['subtotals'] = list()
        # each statistic gets three parallel lists: the raw value, value per
        # unit area, and increment rate versus the base period
        statistic_names = ('mean', 'median', 'minimum', 'maximum', 'stdev', 'variance')
        for statistic in statistic_names:
            result['reporting_period'][statistic + 's'] = list()
            result['reporting_period'][statistic + 's_per_unit_area'] = list()
            result['reporting_period'][statistic + 's_increment_rate'] = list()

        for energy_category_id in energy_category_set:
            reporting_entry = reporting[energy_category_id]
            base_entry = base[energy_category_id]
            result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name'])
            result['reporting_period']['energy_category_ids'].append(energy_category_id)
            result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
            result['reporting_period']['timestamps'].append(reporting_entry['timestamps'])
            result['reporting_period']['values'].append(reporting_entry['values'])
            result['reporting_period']['subtotals'].append(reporting_entry['subtotal'])
            for statistic in statistic_names:
                result['reporting_period'][statistic + 's'].append(reporting_entry[statistic])
                result['reporting_period'][statistic + 's_per_unit_area'].append(
                    Reporting._per_unit_area(reporting_entry[statistic], store['area']))
                result['reporting_period'][statistic + 's_increment_rate'].append(
                    Reporting._increment_rate(reporting_entry[statistic], base_entry[statistic]))

        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }

        # export result to Excel file and then encode the file to base64 string
        result['excel_bytes_base64'] = excelexporters.storestatistics.export(result,
                                                                             store['name'],
                                                                             reporting_start_datetime_local,
                                                                             reporting_end_datetime_local,
                                                                             period_type)
        resp.body = json.dumps(result)