| Total Complexity | 132 |
| Total Lines | 676 |
| Duplicated Lines | 93.34 % |
| Changes | 0 |
Duplicate code is one of the most pungent code smells. A rule of thumb that is often applied is to restructure code once it is duplicated in three or more places.
The usual remedy is to extract the duplicated logic into a shared method or class. In this file, for example, the same cursor/connection cleanup block is repeated in every error path and again before the report is built; one way to consolidate it is shown in the sketch below.
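A minimal sketch of that extraction, assuming a module-level helper is acceptable here; the name close_connections is hypothetical and is not part of the existing module:

# Hypothetical helper (illustration only): one place to close the cursors and
# connections that the error paths in this report currently close one by one.
def close_connections(*pairs):
    """Close each (cursor, connection) pair, skipping anything that is None."""
    for cursor, cnx in pairs:
        if cursor:
            cursor.close()
        if cnx:
            cnx.close()

# A duplicated block such as
#     if cursor_system:
#         cursor_system.close()
#     if cnx_system:
#         cnx_system.close()
#     ...
# could then shrink to a single call:
#     close_connections((cursor_system, cnx_system),
#                       (cursor_energy, cnx_energy),
#                       (cursor_historical, cnx_historical))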
Complex classes like reports.tenantload often do a lot of different things. To break such a class down, we need to identify a cohesive component within the class. A common approach to finding such a component is to look for fields and methods that share the same prefixes or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster. One possible extraction for this module is sketched below.
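A minimal sketch of such an extraction, under the assumption that the per-period bookkeeping (timestamps, sub_averages, sub_maximums, average, maximum, factor) that on_get() builds by hand is the cohesive component; the class name PeriodLoad and its methods are illustrative, not existing code:

from decimal import Decimal


class PeriodLoad:
    """Holds the per-period load series that on_get() currently builds as nested dicts."""

    def __init__(self):
        self.timestamps = []
        self.sub_averages = []
        self.sub_maximums = []
        self.average = None
        self.maximum = None

    @property
    def factor(self):
        # Load factor: average over maximum, defined only when both are known and maximum is positive.
        if self.average is not None and self.maximum is not None and self.maximum > Decimal(0):
            return self.average / self.maximum
        return None

    def append_period(self, timestamp, sub_average, sub_maximum):
        # One entry per reporting bucket (hour, day, week, month or year).
        self.timestamps.append(timestamp)
        self.sub_averages.append(sub_average)
        self.sub_maximums.append(sub_maximum)

With such a class, base and reporting would each become a dict mapping energy_category_id to a PeriodLoad instance instead of a hand-rolled dict of lists.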
"""
Tenant Load Report API

This module provides REST API endpoints for generating tenant load reports.
It analyzes tenant load patterns and capacity utilization to provide
insights into load optimization and capacity planning opportunities.

Key Features:
- Tenant load analysis
- Base period vs reporting period comparison
- Load pattern identification
- Capacity utilization analysis
- Excel export functionality
- Load optimization insights

Report Components:
- Tenant load summary
- Base period comparison data
- Load pattern analysis
- Capacity utilization metrics
- Load optimization recommendations
- Peak load identification

The module uses Falcon framework for REST API and includes:
- Database queries for load data
- Load calculation algorithms
- Capacity analysis tools
- Excel export via excelexporters
- Multi-language support
- User authentication and authorization
"""

import re
from datetime import datetime, timedelta, timezone
from decimal import Decimal
import falcon
import mysql.connector
import simplejson as json
import config
import excelexporters.tenantload
from core import utilities
from core.useractivity import access_control, api_key_control


class Reporting:
    def __init__(self):
        """Initializes Reporting"""
        pass

    @staticmethod
    def on_options(req, resp):
        _ = req
        resp.status = falcon.HTTP_200

    ####################################################################################################################
    # PROCEDURES
    # Step 1: validate parameters
    # Step 2: query the tenant
    # Step 3: query energy categories
    # Step 4: query associated sensors
    # Step 5: query associated points
    # Step 6: query base period energy input
    # Step 7: query reporting period energy input
    # Step 8: query tariff data
    # Step 9: query associated sensors and points data
    # Step 10: construct the report
    ####################################################################################################################
    @staticmethod
    def on_get(req, resp):
        if 'API-KEY' not in req.headers or \
                not isinstance(req.headers['API-KEY'], str) or \
                len(str.strip(req.headers['API-KEY'])) == 0:
            access_control(req)
        else:
            api_key_control(req)
        print(req.params)
        tenant_id = req.params.get('tenantid')
        tenant_uuid = req.params.get('tenantuuid')
        period_type = req.params.get('periodtype')
        base_period_start_datetime_local = req.params.get('baseperiodstartdatetime')
        base_period_end_datetime_local = req.params.get('baseperiodenddatetime')
        reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime')
        language = req.params.get('language')
        quick_mode = req.params.get('quickmode')

        ################################################################################################################
        # Step 1: validate parameters
        ################################################################################################################
        if tenant_id is None and tenant_uuid is None:
            raise falcon.HTTPError(status=falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_TENANT_ID')

        if tenant_id is not None:
            tenant_id = str.strip(tenant_id)
            if not tenant_id.isdigit() or int(tenant_id) <= 0:
                raise falcon.HTTPError(status=falcon.HTTP_400,
                                       title='API.BAD_REQUEST',
                                       description='API.INVALID_TENANT_ID')

        if tenant_uuid is not None:
            regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
            match = regex.match(str.strip(tenant_uuid))
            if not bool(match):
                raise falcon.HTTPError(status=falcon.HTTP_400,
                                       title='API.BAD_REQUEST',
                                       description='API.INVALID_TENANT_UUID')

        if period_type is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_PERIOD_TYPE')
        else:
            period_type = str.strip(period_type)
            if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description='API.INVALID_PERIOD_TYPE')

        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        base_start_datetime_utc = None
        if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0:
            base_period_start_datetime_local = str.strip(base_period_start_datetime_local)
            try:
                base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_START_DATETIME")
            base_start_datetime_utc = \
                base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
            if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30:
                base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0)
            else:
                base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0)

        base_end_datetime_utc = None
        if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0:
            base_period_end_datetime_local = str.strip(base_period_end_datetime_local)
            try:
                base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_BASE_PERIOD_END_DATETIME")
            base_end_datetime_utc = \
                base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)

        if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
                base_start_datetime_utc >= base_end_datetime_utc:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_BASE_PERIOD_END_DATETIME')

        if reporting_period_start_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        else:
            reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local)
            try:
                reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local,
                                                                 '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
            reporting_start_datetime_utc = \
                reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
            if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0)
            else:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0)

        if reporting_period_end_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        else:
            reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local)
            try:
                reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local,
                                                               '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                    timedelta(minutes=timezone_offset)
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_END_DATETIME")

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        # if quick mode is turned on, do not return parameters data and the Excel file
        is_quick_mode = False
        if quick_mode is not None and \
                len(str.strip(quick_mode)) > 0 and \
                str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'):
            is_quick_mode = True

        trans = utilities.get_translation(language)
        trans.install()
        _ = trans.gettext

        ################################################################################################################
        # Step 2: query the tenant
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_energy = mysql.connector.connect(**config.myems_energy_db)
        cursor_energy = cnx_energy.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        if tenant_id is not None:
            cursor_system.execute(" SELECT id, name, area, cost_center_id "
                                  " FROM tbl_tenants "
                                  " WHERE id = %s ", (tenant_id,))
            row_tenant = cursor_system.fetchone()
        elif tenant_uuid is not None:
            cursor_system.execute(" SELECT id, name, area, cost_center_id "
                                  " FROM tbl_tenants "
                                  " WHERE uuid = %s ", (tenant_uuid,))
            row_tenant = cursor_system.fetchone()

        if row_tenant is None:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.close()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.close()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.close()
            raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.TENANT_NOT_FOUND')

        tenant = dict()
        tenant['id'] = row_tenant[0]
        tenant['name'] = row_tenant[1]
        tenant['area'] = row_tenant[2]
        tenant['cost_center_id'] = row_tenant[3]

        ################################################################################################################
        # Step 3: query energy categories
        ################################################################################################################
        energy_category_set = set()
        # query energy categories in base period
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_tenant_input_category_hourly "
                              " WHERE tenant_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (tenant['id'], base_start_datetime_utc, base_end_datetime_utc))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

        # query energy categories in reporting period
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_tenant_input_category_hourly "
                              " WHERE tenant_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (tenant['id'], reporting_start_datetime_utc, reporting_end_datetime_utc))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

        # query all energy categories in base period and reporting period
        cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                              " FROM tbl_energy_categories "
                              " ORDER BY id ", )
        rows_energy_categories = cursor_system.fetchall()
        if rows_energy_categories is None or len(rows_energy_categories) == 0:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.close()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.close()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.close()
            raise falcon.HTTPError(status=falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.ENERGY_CATEGORY_NOT_FOUND')
        energy_category_dict = dict()
        for row_energy_category in rows_energy_categories:
            if row_energy_category[0] in energy_category_set:
                energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                                "unit_of_measure": row_energy_category[2],
                                                                "kgce": row_energy_category[3],
                                                                "kgco2e": row_energy_category[4]}

        ################################################################################################################
        # Step 4: query associated sensors
        ################################################################################################################
        point_list = list()
        cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                              " FROM tbl_tenants t, tbl_sensors s, tbl_tenants_sensors ts, "
                              " tbl_points p, tbl_sensors_points sp "
                              " WHERE t.id = %s AND t.id = ts.tenant_id AND ts.sensor_id = s.id "
                              " AND s.id = sp.sensor_id AND sp.point_id = p.id "
                              " ORDER BY p.id ", (tenant['id'],))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 5: query associated points
        ################################################################################################################
        cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                              " FROM tbl_tenants t, tbl_tenants_points tp, tbl_points p "
                              " WHERE t.id = %s AND t.id = tp.tenant_id AND tp.point_id = p.id "
                              " ORDER BY p.id ", (tenant['id'],))
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row in rows_points:
                point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

        ################################################################################################################
        # Step 6: query base period energy input
        ################################################################################################################
        base = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                base[energy_category_id] = dict()
                base[energy_category_id]['timestamps'] = list()
                base[energy_category_id]['sub_averages'] = list()
                base[energy_category_id]['sub_maximums'] = list()
                base[energy_category_id]['average'] = None
                base[energy_category_id]['maximum'] = None
                base[energy_category_id]['factor'] = None

                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM tbl_tenant_input_category_hourly "
                                      " WHERE tenant_id = %s "
                                      " AND energy_category_id = %s "
                                      " AND start_datetime_utc >= %s "
                                      " AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (tenant['id'],
                                       energy_category_id,
                                       base_start_datetime_utc,
                                       base_end_datetime_utc))
                rows_tenant_hourly = cursor_energy.fetchall()

                rows_tenant_periodically, \
                    base[energy_category_id]['average'], \
                    base[energy_category_id]['maximum'] = \
                    utilities.averaging_hourly_data_by_period(rows_tenant_hourly,
                                                              base_start_datetime_utc,
                                                              base_end_datetime_utc,
                                                              period_type)
                base[energy_category_id]['factor'] = \
                    (base[energy_category_id]['average'] / base[energy_category_id]['maximum']
                     if (base[energy_category_id]['average'] is not None and
                         base[energy_category_id]['maximum'] is not None and
                         base[energy_category_id]['maximum'] > Decimal(0.0))
                     else None)

                for row_tenant_periodically in rows_tenant_periodically:
                    current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.isoformat()[0:19]
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'weekly':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.isoformat()[0:7]
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.isoformat()[0:4]

                    base[energy_category_id]['timestamps'].append(current_datetime)
                    base[energy_category_id]['sub_averages'].append(row_tenant_periodically[1])
                    base[energy_category_id]['sub_maximums'].append(row_tenant_periodically[2])

        ################################################################################################################
        # Step 7: query reporting period energy input
        ################################################################################################################
        reporting = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                reporting[energy_category_id] = dict()
                reporting[energy_category_id]['timestamps'] = list()
                reporting[energy_category_id]['sub_averages'] = list()
                reporting[energy_category_id]['sub_maximums'] = list()
                reporting[energy_category_id]['average'] = None
                reporting[energy_category_id]['maximum'] = None
                reporting[energy_category_id]['factor'] = None

                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM tbl_tenant_input_category_hourly "
                                      " WHERE tenant_id = %s "
                                      " AND energy_category_id = %s "
                                      " AND start_datetime_utc >= %s "
                                      " AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (tenant['id'],
                                       energy_category_id,
                                       reporting_start_datetime_utc,
                                       reporting_end_datetime_utc))
                rows_tenant_hourly = cursor_energy.fetchall()

                rows_tenant_periodically, \
                    reporting[energy_category_id]['average'], \
                    reporting[energy_category_id]['maximum'] = \
                    utilities.averaging_hourly_data_by_period(rows_tenant_hourly,
                                                              reporting_start_datetime_utc,
                                                              reporting_end_datetime_utc,
                                                              period_type)
                reporting[energy_category_id]['factor'] = \
                    (reporting[energy_category_id]['average'] / reporting[energy_category_id]['maximum']
                     if (reporting[energy_category_id]['average'] is not None and
                         reporting[energy_category_id]['maximum'] is not None and
                         reporting[energy_category_id]['maximum'] > Decimal(0.0))
                     else None)

                for row_tenant_periodically in rows_tenant_periodically:
                    current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    if period_type == 'hourly':
                        current_datetime = current_datetime_local.isoformat()[0:19]
                    elif period_type == 'daily':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'weekly':
                        current_datetime = current_datetime_local.isoformat()[0:10]
                    elif period_type == 'monthly':
                        current_datetime = current_datetime_local.isoformat()[0:7]
                    elif period_type == 'yearly':
                        current_datetime = current_datetime_local.isoformat()[0:4]

                    reporting[energy_category_id]['timestamps'].append(current_datetime)
                    reporting[energy_category_id]['sub_averages'].append(row_tenant_periodically[1])
                    reporting[energy_category_id]['sub_maximums'].append(row_tenant_periodically[2])

        ################################################################################################################
        # Step 8: query tariff data
        ################################################################################################################
        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()
        if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \
                and not is_quick_mode:
            for energy_category_id in energy_category_set:
                energy_category_tariff_dict = utilities.get_energy_category_tariffs(tenant['cost_center_id'],
                                                                                    energy_category_id,
                                                                                    reporting_start_datetime_utc,
                                                                                    reporting_end_datetime_utc)
                tariff_timestamp_list = list()
                tariff_value_list = list()
                for k, v in energy_category_tariff_dict.items():
                    # convert k from utc to local
                    k = k + timedelta(minutes=timezone_offset)
                    tariff_timestamp_list.append(k.isoformat()[0:19])
                    tariff_value_list.append(v)

                parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name'])
                parameters_data['timestamps'].append(tariff_timestamp_list)
                parameters_data['values'].append(tariff_value_list)

        ################################################################################################################
        # Step 9: query associated sensors and points data
        ################################################################################################################
        if not is_quick_mode:
            for point in point_list:
                point_values = []
                point_timestamps = []
                if point['object_type'] == 'ENERGY_VALUE':
                    query = (" SELECT utc_date_time, actual_value "
                             " FROM tbl_energy_value "
                             " WHERE point_id = %s "
                             " AND utc_date_time BETWEEN %s AND %s "
                             " ORDER BY utc_date_time ")
                    cursor_historical.execute(query, (point['id'],
                                                      reporting_start_datetime_utc,
                                                      reporting_end_datetime_utc))
                    rows = cursor_historical.fetchall()

                    if rows is not None and len(rows) > 0:
                        for row in rows:
                            current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset)
                            current_datetime = current_datetime_local.isoformat()[0:19]
                            point_timestamps.append(current_datetime)
                            point_values.append(row[1])
                elif point['object_type'] == 'ANALOG_VALUE':
                    query = (" SELECT utc_date_time, actual_value "
                             " FROM tbl_analog_value "
                             " WHERE point_id = %s "
                             " AND utc_date_time BETWEEN %s AND %s "
                             " ORDER BY utc_date_time ")
                    cursor_historical.execute(query, (point['id'],
                                                      reporting_start_datetime_utc,
                                                      reporting_end_datetime_utc))
                    rows = cursor_historical.fetchall()

                    if rows is not None and len(rows) > 0:
                        for row in rows:
                            current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset)
                            current_datetime = current_datetime_local.isoformat()[0:19]
                            point_timestamps.append(current_datetime)
                            point_values.append(row[1])
                elif point['object_type'] == 'DIGITAL_VALUE':
                    query = (" SELECT utc_date_time, actual_value "
                             " FROM tbl_digital_value "
                             " WHERE point_id = %s "
                             " AND utc_date_time BETWEEN %s AND %s "
                             " ORDER BY utc_date_time ")
                    cursor_historical.execute(query, (point['id'],
                                                      reporting_start_datetime_utc,
                                                      reporting_end_datetime_utc))
                    rows = cursor_historical.fetchall()

                    if rows is not None and len(rows) > 0:
                        for row in rows:
                            current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset)
                            current_datetime = current_datetime_local.isoformat()[0:19]
                            point_timestamps.append(current_datetime)
                            point_values.append(row[1])

                parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
                parameters_data['timestamps'].append(point_timestamps)
                parameters_data['values'].append(point_values)

        ################################################################################################################
        # Step 10: construct the report
        ################################################################################################################
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()

        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.close()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()

        result = dict()

        result['tenant'] = dict()
        result['tenant']['name'] = tenant['name']
        result['tenant']['area'] = tenant['area']

        result['base_period'] = dict()
        result['base_period']['names'] = list()
        result['base_period']['units'] = list()
        result['base_period']['timestamps'] = list()
        result['base_period']['sub_averages'] = list()
        result['base_period']['sub_maximums'] = list()
        result['base_period']['averages'] = list()
        result['base_period']['maximums'] = list()
        result['base_period']['factors'] = list()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                result['base_period']['names'].append(energy_category_dict[energy_category_id]['name'])
                result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
                result['base_period']['timestamps'].append(base[energy_category_id]['timestamps'])
                result['base_period']['sub_averages'].append(base[energy_category_id]['sub_averages'])
                result['base_period']['sub_maximums'].append(base[energy_category_id]['sub_maximums'])
                result['base_period']['averages'].append(base[energy_category_id]['average'])
                result['base_period']['maximums'].append(base[energy_category_id]['maximum'])
                result['base_period']['factors'].append(base[energy_category_id]['factor'])

        result['reporting_period'] = dict()
        result['reporting_period']['names'] = list()
        result['reporting_period']['energy_category_ids'] = list()
        result['reporting_period']['units'] = list()
        result['reporting_period']['timestamps'] = list()
        result['reporting_period']['sub_averages'] = list()
        result['reporting_period']['sub_maximums'] = list()
        result['reporting_period']['rates_of_sub_maximums'] = list()
        result['reporting_period']['averages'] = list()
        result['reporting_period']['averages_per_unit_area'] = list()
        result['reporting_period']['averages_increment_rate'] = list()
        result['reporting_period']['maximums'] = list()
        result['reporting_period']['maximums_per_unit_area'] = list()
        result['reporting_period']['maximums_increment_rate'] = list()
        result['reporting_period']['factors'] = list()
        result['reporting_period']['factors_increment_rate'] = list()

        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name'])
                result['reporting_period']['energy_category_ids'].append(energy_category_id)
                result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
                result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps'])
                result['reporting_period']['sub_averages'].append(reporting[energy_category_id]['sub_averages'])
                result['reporting_period']['sub_maximums'].append(reporting[energy_category_id]['sub_maximums'])
                result['reporting_period']['averages'].append(reporting[energy_category_id]['average'])
                result['reporting_period']['averages_per_unit_area'].append(
                    reporting[energy_category_id]['average'] / tenant['area']
                    if reporting[energy_category_id]['average'] is not None and
                    tenant['area'] is not None and
                    tenant['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['averages_increment_rate'].append(
                    (reporting[energy_category_id]['average'] - base[energy_category_id]['average']) /
                    base[energy_category_id]['average'] if (reporting[energy_category_id]['average'] is not None and
                                                            base[energy_category_id]['average'] is not None and
                                                            base[energy_category_id]['average'] > Decimal(0.0))
                    else None)
                result['reporting_period']['maximums'].append(reporting[energy_category_id]['maximum'])
                result['reporting_period']['maximums_increment_rate'].append(
                    (reporting[energy_category_id]['maximum'] - base[energy_category_id]['maximum']) /
                    base[energy_category_id]['maximum'] if (reporting[energy_category_id]['maximum'] is not None and
                                                            base[energy_category_id]['maximum'] is not None and
                                                            base[energy_category_id]['maximum'] > Decimal(0.0))
                    else None)
                result['reporting_period']['maximums_per_unit_area'].append(
                    reporting[energy_category_id]['maximum'] / tenant['area']
                    if reporting[energy_category_id]['maximum'] is not None and
                    tenant['area'] is not None and
                    tenant['area'] > Decimal(0.0)
                    else None)
                result['reporting_period']['factors'].append(reporting[energy_category_id]['factor'])
                result['reporting_period']['factors_increment_rate'].append(
                    (reporting[energy_category_id]['factor'] - base[energy_category_id]['factor']) /
                    base[energy_category_id]['factor'] if (reporting[energy_category_id]['factor'] is not None and
                                                           base[energy_category_id]['factor'] is not None and
                                                           base[energy_category_id]['factor'] > Decimal(0.0))
                    else None)

                rate = list()
                for index, value in enumerate(reporting[energy_category_id]['sub_maximums']):
                    if index < len(base[energy_category_id]['sub_maximums']) \
                            and base[energy_category_id]['sub_maximums'][index] != 0 and value != 0 \
                            and base[energy_category_id]['sub_maximums'][index] is not None and value is not None:
                        rate.append((value - base[energy_category_id]['sub_maximums'][index])
                                    / base[energy_category_id]['sub_maximums'][index])
                    else:
                        rate.append(None)
                result['reporting_period']['rates_of_sub_maximums'].append(rate)

        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }

        # export result to Excel file and then encode the file to base64 string
        if not is_quick_mode:
            result['excel_bytes_base64'] = excelexporters.tenantload.export(result,
                                                                            tenant['name'],
                                                                            base_period_start_datetime_local,
                                                                            base_period_end_datetime_local,
                                                                            reporting_period_start_datetime_local,
                                                                            reporting_period_end_datetime_local,
                                                                            period_type,
                                                                            language)

        resp.text = json.dumps(result)