| @@ 45-737 (lines=693) @@ | ||
| 42 | from core.useractivity import access_control, api_key_control |
|
| 43 | ||
| 44 | ||
| 45 | class Reporting: |
|
| 46 | def __init__(self): |
|
| 47 | """"Initializes Reporting""" |
|
| 48 | pass |
|
| 49 | ||
| 50 | @staticmethod |
|
| 51 | def on_options(req, resp): |
|
| 52 | _ = req |
|
| 53 | resp.status = falcon.HTTP_200 |
|
| 54 | ||
| 55 | #################################################################################################################### |
|
| 56 | # PROCEDURES |
|
| 57 | # Step 1: validate parameters |
|
| 58 | # Step 2: query the shopfloor |
|
| 59 | # Step 3: query energy categories |
|
| 60 | # Step 4: query associated sensors |
|
| 61 | # Step 5: query associated points |
|
| 62 | # Step 6: query base period energy input |
|
| 63 | # Step 7: query reporting period energy input |
|
| 64 | # Step 8: query tariff data |
|
| 65 | # Step 9: query associated sensors and points data |
|
| 66 | # Step 10: construct the report |
|
| 67 | #################################################################################################################### |
|
| 68 | @staticmethod |
|
| 69 | def on_get(req, resp): |
|
| 70 | if 'API-KEY' not in req.headers or \ |
|
| 71 | not isinstance(req.headers['API-KEY'], str) or \ |
|
| 72 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
| 73 | access_control(req) |
|
| 74 | else: |
|
| 75 | api_key_control(req) |
|
| 76 | print(req.params) |
|
| 77 | shopfloor_id = req.params.get('shopfloorid') |
|
| 78 | shopfloor_uuid = req.params.get('shopflooruuid') |
|
| 79 | period_type = req.params.get('periodtype') |
|
| 80 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
| 81 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
| 82 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
| 83 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
| 84 | language = req.params.get('language') |
|
| 85 | quick_mode = req.params.get('quickmode') |
|
| 86 | ||
| 87 | ################################################################################################################ |
|
| 88 | # Step 1: validate parameters |
|
| 89 | ################################################################################################################ |
|
| 90 | if shopfloor_id is None and shopfloor_uuid is None: |
|
| 91 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 92 | title='API.BAD_REQUEST', |
|
| 93 | description='API.INVALID_SHOPFLOOR_ID') |
|
| 94 | ||
| 95 | if shopfloor_id is not None: |
|
| 96 | shopfloor_id = str.strip(shopfloor_id) |
|
| 97 | if not shopfloor_id.isdigit() or int(shopfloor_id) <= 0: |
|
| 98 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 99 | title='API.BAD_REQUEST', |
|
| 100 | description='API.INVALID_SHOPFLOOR_ID') |
|
| 101 | ||
| 102 | if shopfloor_uuid is not None: |
|
| 103 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
| 104 | match = regex.match(str.strip(shopfloor_uuid)) |
|
| 105 | if not bool(match): |
|
| 106 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 107 | title='API.BAD_REQUEST', |
|
| 108 | description='API.INVALID_SHOPFLOOR_UUID') |
|
| 109 | ||
| 110 | if period_type is None: |
|
| 111 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 112 | description='API.INVALID_PERIOD_TYPE') |
|
| 113 | else: |
|
| 114 | period_type = str.strip(period_type) |
|
| 115 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
| 116 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 117 | description='API.INVALID_PERIOD_TYPE') |
|
| 118 | ||
| 119 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
| 120 | if config.utc_offset[0] == '-': |
|
| 121 | timezone_offset = -timezone_offset |
|
| 122 | ||
| 123 | base_start_datetime_utc = None |
|
| 124 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
| 125 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
| 126 | try: |
|
| 127 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
| 128 | except ValueError: |
|
| 129 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 130 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
| 131 | base_start_datetime_utc = \ |
|
| 132 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 133 | # normalize the start datetime |
|
| 134 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
| 135 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
| 136 | else: |
|
| 137 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
| 138 | ||
| 139 | base_end_datetime_utc = None |
|
| 140 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
| 141 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
| 142 | try: |
|
| 143 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
| 144 | except ValueError: |
|
| 145 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 146 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
| 147 | base_end_datetime_utc = \ |
|
| 148 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 149 | ||
| 150 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
| 151 | base_start_datetime_utc >= base_end_datetime_utc: |
|
| 152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 153 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
| 154 | ||
| 155 | if reporting_period_start_datetime_local is None: |
|
| 156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 157 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
| 158 | else: |
|
| 159 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
| 160 | try: |
|
| 161 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
| 162 | '%Y-%m-%dT%H:%M:%S') |
|
| 163 | except ValueError: |
|
| 164 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 165 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
| 166 | reporting_start_datetime_utc = \ |
|
| 167 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 168 | # normalize the start datetime |
|
| 169 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
| 170 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
| 171 | else: |
|
| 172 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
| 173 | ||
| 174 | if reporting_period_end_datetime_local is None: |
|
| 175 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 176 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
| 177 | else: |
|
| 178 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
| 179 | try: |
|
| 180 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
| 181 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
| 182 | timedelta(minutes=timezone_offset) |
|
| 183 | except ValueError: |
|
| 184 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 185 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
| 186 | ||
| 187 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
| 188 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 189 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
| 190 | ||
| 191 | # if quick mode is turned on, do not return parameters data or the Excel file |
|
| 192 | is_quick_mode = False |
|
| 193 | if quick_mode is not None and \ |
|
| 194 | len(str.strip(quick_mode)) > 0 and \ |
|
| 195 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
| 196 | is_quick_mode = True |
|
| 197 | ||
| 198 | trans = utilities.get_translation(language) |
|
| 199 | trans.install() |
|
| 200 | _ = trans.gettext |
|
| 201 | ||
| 202 | ################################################################################################################ |
|
| 203 | # Step 2: query the shopfloor |
|
| 204 | ################################################################################################################ |
|
| 205 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
| 206 | cursor_system = cnx_system.cursor() |
|
| 207 | ||
| 208 | cnx_energy = mysql.connector.connect(**config.myems_energy_db) |
|
| 209 | cursor_energy = cnx_energy.cursor() |
|
| 210 | ||
| 211 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
| 212 | cursor_historical = cnx_historical.cursor() |
|
| 213 | if shopfloor_id is not None: |
|
| 214 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
| 215 | " FROM tbl_shopfloors " |
|
| 216 | " WHERE id = %s ", (shopfloor_id,)) |
|
| 217 | row_shopfloor = cursor_system.fetchone() |
|
| 218 | elif shopfloor_uuid is not None: |
|
| 219 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
| 220 | " FROM tbl_shopfloors " |
|
| 221 | " WHERE uuid = %s ", (shopfloor_uuid,)) |
|
| 222 | row_shopfloor = cursor_system.fetchone() |
|
| 223 | ||
| 224 | if row_shopfloor is None: |
|
| 225 | if cursor_system: |
|
| 226 | cursor_system.close() |
|
| 227 | if cnx_system: |
|
| 228 | cnx_system.close() |
|
| 229 | ||
| 230 | if cursor_energy: |
|
| 231 | cursor_energy.close() |
|
| 232 | if cnx_energy: |
|
| 233 | cnx_energy.close() |
|
| 234 | ||
| 235 | if cursor_historical: |
|
| 236 | cursor_historical.close() |
|
| 237 | if cnx_historical: |
|
| 238 | cnx_historical.close() |
|
| 239 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.SHOPFLOOR_NOT_FOUND') |
|
| 240 | ||
| 241 | shopfloor = dict() |
|
| 242 | shopfloor['id'] = row_shopfloor[0] |
|
| 243 | shopfloor['name'] = row_shopfloor[1] |
|
| 244 | shopfloor['area'] = row_shopfloor[2] |
|
| 245 | shopfloor['cost_center_id'] = row_shopfloor[3] |
|
| 246 | ||
| 247 | ################################################################################################################ |
|
| 248 | # Step 3: query energy categories |
|
| 249 | ################################################################################################################ |
|
| 250 | energy_category_set = set() |
|
| 251 | # query energy categories in base period |
|
| 252 | cursor_energy.execute(" SELECT DISTINCT(energy_category_id) " |
|
| 253 | " FROM tbl_shopfloor_input_category_hourly " |
|
| 254 | " WHERE shopfloor_id = %s " |
|
| 255 | " AND start_datetime_utc >= %s " |
|
| 256 | " AND start_datetime_utc < %s ", |
|
| 257 | (shopfloor['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
| 258 | rows_energy_categories = cursor_energy.fetchall() |
|
| 259 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
| 260 | for row_energy_category in rows_energy_categories: |
|
| 261 | energy_category_set.add(row_energy_category[0]) |
|
| 262 | ||
| 263 | # query energy categories in reporting period |
|
| 264 | cursor_energy.execute(" SELECT DISTINCT(energy_category_id) " |
|
| 265 | " FROM tbl_shopfloor_input_category_hourly " |
|
| 266 | " WHERE shopfloor_id = %s " |
|
| 267 | " AND start_datetime_utc >= %s " |
|
| 268 | " AND start_datetime_utc < %s ", |
|
| 269 | (shopfloor['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
| 270 | rows_energy_categories = cursor_energy.fetchall() |
|
| 271 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
| 272 | for row_energy_category in rows_energy_categories: |
|
| 273 | energy_category_set.add(row_energy_category[0]) |
|
| 274 | ||
| 275 | # query all energy categories in base period and reporting period |
|
| 276 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
| 277 | " FROM tbl_energy_categories " |
|
| 278 | " ORDER BY id ", ) |
|
| 279 | rows_energy_categories = cursor_system.fetchall() |
|
| 280 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
| 281 | if cursor_system: |
|
| 282 | cursor_system.close() |
|
| 283 | if cnx_system: |
|
| 284 | cnx_system.close() |
|
| 285 | ||
| 286 | if cursor_energy: |
|
| 287 | cursor_energy.close() |
|
| 288 | if cnx_energy: |
|
| 289 | cnx_energy.close() |
|
| 290 | ||
| 291 | if cursor_historical: |
|
| 292 | cursor_historical.close() |
|
| 293 | if cnx_historical: |
|
| 294 | cnx_historical.close() |
|
| 295 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
| 296 | title='API.NOT_FOUND', |
|
| 297 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
| 298 | energy_category_dict = dict() |
|
| 299 | for row_energy_category in rows_energy_categories: |
|
| 300 | if row_energy_category[0] in energy_category_set: |
|
| 301 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
| 302 | "unit_of_measure": row_energy_category[2], |
|
| 303 | "kgce": row_energy_category[3], |
|
| 304 | "kgco2e": row_energy_category[4]} |
|
| 305 | ||
| 306 | ################################################################################################################ |
|
| 307 | # Step 4: query associated sensors |
|
| 308 | ################################################################################################################ |
|
| 309 | point_list = list() |
|
| 310 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
| 311 | " FROM tbl_shopfloors st, tbl_sensors se, tbl_shopfloors_sensors ss, " |
|
| 312 | " tbl_points p, tbl_sensors_points sp " |
|
| 313 | " WHERE st.id = %s AND st.id = ss.shopfloor_id AND ss.sensor_id = se.id " |
|
| 314 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
|
| 315 | " ORDER BY p.id ", (shopfloor['id'],)) |
|
| 316 | rows_points = cursor_system.fetchall() |
|
| 317 | if rows_points is not None and len(rows_points) > 0: |
|
| 318 | for row in rows_points: |
|
| 319 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
| 320 | ||
| 321 | ################################################################################################################ |
|
| 322 | # Step 5: query associated points |
|
| 323 | ################################################################################################################ |
|
| 324 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
| 325 | " FROM tbl_shopfloors s, tbl_shopfloors_points sp, tbl_points p " |
|
| 326 | " WHERE s.id = %s AND s.id = sp.shopfloor_id AND sp.point_id = p.id " |
|
| 327 | " ORDER BY p.id ", (shopfloor['id'],)) |
|
| 328 | rows_points = cursor_system.fetchall() |
|
| 329 | if rows_points is not None and len(rows_points) > 0: |
|
| 330 | for row in rows_points: |
|
| 331 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
| 332 | ||
| 333 | ################################################################################################################ |
|
| 334 | # Step 6: query base period energy input |
|
| 335 | ################################################################################################################ |
|
| 336 | base = dict() |
|
| 337 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 338 | for energy_category_id in energy_category_set: |
|
| 339 | base[energy_category_id] = dict() |
|
| 340 | base[energy_category_id]['timestamps'] = list() |
|
| 341 | base[energy_category_id]['values'] = list() |
|
| 342 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
| 343 | base[energy_category_id]['mean'] = None |
|
| 344 | base[energy_category_id]['median'] = None |
|
| 345 | base[energy_category_id]['minimum'] = None |
|
| 346 | base[energy_category_id]['maximum'] = None |
|
| 347 | base[energy_category_id]['stdev'] = None |
|
| 348 | base[energy_category_id]['variance'] = None |
|
| 349 | ||
| 350 | cursor_energy.execute(" SELECT start_datetime_utc, actual_value " |
|
| 351 | " FROM tbl_shopfloor_input_category_hourly " |
|
| 352 | " WHERE shopfloor_id = %s " |
|
| 353 | " AND energy_category_id = %s " |
|
| 354 | " AND start_datetime_utc >= %s " |
|
| 355 | " AND start_datetime_utc < %s " |
|
| 356 | " ORDER BY start_datetime_utc ", |
|
| 357 | (shopfloor['id'], |
|
| 358 | energy_category_id, |
|
| 359 | base_start_datetime_utc, |
|
| 360 | base_end_datetime_utc)) |
|
| 361 | rows_shopfloor_hourly = cursor_energy.fetchall() |
|
| 362 | ||
| 363 | rows_shopfloor_periodically, \ |
|
| 364 | base[energy_category_id]['mean'], \ |
|
| 365 | base[energy_category_id]['median'], \ |
|
| 366 | base[energy_category_id]['minimum'], \ |
|
| 367 | base[energy_category_id]['maximum'], \ |
|
| 368 | base[energy_category_id]['stdev'], \ |
|
| 369 | base[energy_category_id]['variance'] = \ |
|
| 370 | utilities.statistics_hourly_data_by_period(rows_shopfloor_hourly, |
|
| 371 | base_start_datetime_utc, |
|
| 372 | base_end_datetime_utc, |
|
| 373 | period_type) |
|
| 374 | ||
| 375 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
|
| 376 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
| 377 | timedelta(minutes=timezone_offset) |
|
| 378 | if period_type == 'hourly': |
|
| 379 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 380 | elif period_type == 'daily': |
|
| 381 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 382 | elif period_type == 'weekly': |
|
| 383 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 384 | elif period_type == 'monthly': |
|
| 385 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
| 386 | elif period_type == 'yearly': |
|
| 387 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
| 388 | ||
| 389 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
|
| 390 | else row_shopfloor_periodically[1] |
|
| 391 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
| 392 | base[energy_category_id]['values'].append(actual_value) |
|
| 393 | base[energy_category_id]['subtotal'] += actual_value |
|
| 394 | ||
| 395 | ################################################################################################################ |
|
| 396 | # Step 7: query reporting period energy input |
|
| 397 | ################################################################################################################ |
|
| 398 | reporting = dict() |
|
| 399 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 400 | for energy_category_id in energy_category_set: |
|
| 401 | reporting[energy_category_id] = dict() |
|
| 402 | reporting[energy_category_id]['timestamps'] = list() |
|
| 403 | reporting[energy_category_id]['values'] = list() |
|
| 404 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
| 405 | reporting[energy_category_id]['mean'] = None |
|
| 406 | reporting[energy_category_id]['median'] = None |
|
| 407 | reporting[energy_category_id]['minimum'] = None |
|
| 408 | reporting[energy_category_id]['maximum'] = None |
|
| 409 | reporting[energy_category_id]['stdev'] = None |
|
| 410 | reporting[energy_category_id]['variance'] = None |
|
| 411 | ||
| 412 | cursor_energy.execute(" SELECT start_datetime_utc, actual_value " |
|
| 413 | " FROM tbl_shopfloor_input_category_hourly " |
|
| 414 | " WHERE shopfloor_id = %s " |
|
| 415 | " AND energy_category_id = %s " |
|
| 416 | " AND start_datetime_utc >= %s " |
|
| 417 | " AND start_datetime_utc < %s " |
|
| 418 | " ORDER BY start_datetime_utc ", |
|
| 419 | (shopfloor['id'], |
|
| 420 | energy_category_id, |
|
| 421 | reporting_start_datetime_utc, |
|
| 422 | reporting_end_datetime_utc)) |
|
| 423 | rows_shopfloor_hourly = cursor_energy.fetchall() |
|
| 424 | ||
| 425 | rows_shopfloor_periodically, \ |
|
| 426 | reporting[energy_category_id]['mean'], \ |
|
| 427 | reporting[energy_category_id]['median'], \ |
|
| 428 | reporting[energy_category_id]['minimum'], \ |
|
| 429 | reporting[energy_category_id]['maximum'], \ |
|
| 430 | reporting[energy_category_id]['stdev'], \ |
|
| 431 | reporting[energy_category_id]['variance'] = \ |
|
| 432 | utilities.statistics_hourly_data_by_period(rows_shopfloor_hourly, |
|
| 433 | reporting_start_datetime_utc, |
|
| 434 | reporting_end_datetime_utc, |
|
| 435 | period_type) |
|
| 436 | ||
| 437 | for row_shopfloor_periodically in rows_shopfloor_periodically: |
|
| 438 | current_datetime_local = row_shopfloor_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
| 439 | timedelta(minutes=timezone_offset) |
|
| 440 | if period_type == 'hourly': |
|
| 441 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 442 | elif period_type == 'daily': |
|
| 443 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 444 | elif period_type == 'weekly': |
|
| 445 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 446 | elif period_type == 'monthly': |
|
| 447 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
| 448 | elif period_type == 'yearly': |
|
| 449 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
| 450 | ||
| 451 | actual_value = Decimal(0.0) if row_shopfloor_periodically[1] is None \ |
|
| 452 | else row_shopfloor_periodically[1] |
|
| 453 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
| 454 | reporting[energy_category_id]['values'].append(actual_value) |
|
| 455 | reporting[energy_category_id]['subtotal'] += actual_value |
|
| 456 | ||
| 457 | ################################################################################################################ |
|
| 458 | # Step 8: query tariff data |
|
| 459 | ################################################################################################################ |
|
| 460 | parameters_data = dict() |
|
| 461 | parameters_data['names'] = list() |
|
| 462 | parameters_data['timestamps'] = list() |
|
| 463 | parameters_data['values'] = list() |
|
| 464 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
| 465 | and not is_quick_mode: |
|
| 466 | for energy_category_id in energy_category_set: |
|
| 467 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(shopfloor['cost_center_id'], |
|
| 468 | energy_category_id, |
|
| 469 | reporting_start_datetime_utc, |
|
| 470 | reporting_end_datetime_utc) |
|
| 471 | tariff_timestamp_list = list() |
|
| 472 | tariff_value_list = list() |
|
| 473 | for k, v in energy_category_tariff_dict.items(): |
|
| 474 | # convert k from utc to local |
|
| 475 | k = k + timedelta(minutes=timezone_offset) |
|
| 476 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
| 477 | tariff_value_list.append(v) |
|
| 478 | ||
| 479 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
| 480 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
| 481 | parameters_data['values'].append(tariff_value_list) |
|
| 482 | ||
| 483 | ################################################################################################################ |
|
| 484 | # Step 9: query associated sensors and points data |
|
| 485 | ################################################################################################################ |
|
| 486 | if not is_quick_mode: |
|
| 487 | for point in point_list: |
|
| 488 | point_values = [] |
|
| 489 | point_timestamps = [] |
|
| 490 | if point['object_type'] == 'ENERGY_VALUE': |
|
| 491 | query = (" SELECT utc_date_time, actual_value " |
|
| 492 | " FROM tbl_energy_value " |
|
| 493 | " WHERE point_id = %s " |
|
| 494 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 495 | " ORDER BY utc_date_time ") |
|
| 496 | cursor_historical.execute(query, (point['id'], |
|
| 497 | reporting_start_datetime_utc, |
|
| 498 | reporting_end_datetime_utc)) |
|
| 499 | rows = cursor_historical.fetchall() |
|
| 500 | ||
| 501 | if rows is not None and len(rows) > 0: |
|
| 502 | for row in rows: |
|
| 503 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 504 | timedelta(minutes=timezone_offset) |
|
| 505 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 506 | point_timestamps.append(current_datetime) |
|
| 507 | point_values.append(row[1]) |
|
| 508 | elif point['object_type'] == 'ANALOG_VALUE': |
|
| 509 | query = (" SELECT utc_date_time, actual_value " |
|
| 510 | " FROM tbl_analog_value " |
|
| 511 | " WHERE point_id = %s " |
|
| 512 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 513 | " ORDER BY utc_date_time ") |
|
| 514 | cursor_historical.execute(query, (point['id'], |
|
| 515 | reporting_start_datetime_utc, |
|
| 516 | reporting_end_datetime_utc)) |
|
| 517 | rows = cursor_historical.fetchall() |
|
| 518 | ||
| 519 | if rows is not None and len(rows) > 0: |
|
| 520 | for row in rows: |
|
| 521 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 522 | timedelta(minutes=timezone_offset) |
|
| 523 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 524 | point_timestamps.append(current_datetime) |
|
| 525 | point_values.append(row[1]) |
|
| 526 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
| 527 | query = (" SELECT utc_date_time, actual_value " |
|
| 528 | " FROM tbl_digital_value " |
|
| 529 | " WHERE point_id = %s " |
|
| 530 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 531 | " ORDER BY utc_date_time ") |
|
| 532 | cursor_historical.execute(query, (point['id'], |
|
| 533 | reporting_start_datetime_utc, |
|
| 534 | reporting_end_datetime_utc)) |
|
| 535 | rows = cursor_historical.fetchall() |
|
| 536 | ||
| 537 | if rows is not None and len(rows) > 0: |
|
| 538 | for row in rows: |
|
| 539 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 540 | timedelta(minutes=timezone_offset) |
|
| 541 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 542 | point_timestamps.append(current_datetime) |
|
| 543 | point_values.append(row[1]) |
|
| 544 | ||
| 545 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
| 546 | parameters_data['timestamps'].append(point_timestamps) |
|
| 547 | parameters_data['values'].append(point_values) |
|
| 548 | ||
| 549 | ################################################################################################################ |
|
| 550 | # Step 10: construct the report |
|
| 551 | ################################################################################################################ |
|
| 552 | if cursor_system: |
|
| 553 | cursor_system.close() |
|
| 554 | if cnx_system: |
|
| 555 | cnx_system.close() |
|
| 556 | ||
| 557 | if cursor_energy: |
|
| 558 | cursor_energy.close() |
|
| 559 | if cnx_energy: |
|
| 560 | cnx_energy.close() |
|
| 561 | ||
| 562 | if cursor_historical: |
|
| 563 | cursor_historical.close() |
|
| 564 | if cnx_historical: |
|
| 565 | cnx_historical.close() |
|
| 566 | ||
| 567 | result = dict() |
|
| 568 | ||
| 569 | result['shopfloor'] = dict() |
|
| 570 | result['shopfloor']['name'] = shopfloor['name'] |
|
| 571 | result['shopfloor']['area'] = shopfloor['area'] |
|
| 572 | ||
| 573 | result['base_period'] = dict() |
|
| 574 | result['base_period']['names'] = list() |
|
| 575 | result['base_period']['units'] = list() |
|
| 576 | result['base_period']['timestamps'] = list() |
|
| 577 | result['base_period']['values'] = list() |
|
| 578 | result['base_period']['subtotals'] = list() |
|
| 579 | result['base_period']['means'] = list() |
|
| 580 | result['base_period']['medians'] = list() |
|
| 581 | result['base_period']['minimums'] = list() |
|
| 582 | result['base_period']['maximums'] = list() |
|
| 583 | result['base_period']['stdevs'] = list() |
|
| 584 | result['base_period']['variances'] = list() |
|
| 585 | ||
| 586 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 587 | for energy_category_id in energy_category_set: |
|
| 588 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
| 589 | result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure']) |
|
| 590 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
| 591 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
| 592 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
| 593 | result['base_period']['means'].append(base[energy_category_id]['mean']) |
|
| 594 | result['base_period']['medians'].append(base[energy_category_id]['median']) |
|
| 595 | result['base_period']['minimums'].append(base[energy_category_id]['minimum']) |
|
| 596 | result['base_period']['maximums'].append(base[energy_category_id]['maximum']) |
|
| 597 | result['base_period']['stdevs'].append(base[energy_category_id]['stdev']) |
|
| 598 | result['base_period']['variances'].append(base[energy_category_id]['variance']) |
|
| 599 | ||
| 600 | result['reporting_period'] = dict() |
|
| 601 | result['reporting_period']['names'] = list() |
|
| 602 | result['reporting_period']['energy_category_ids'] = list() |
|
| 603 | result['reporting_period']['units'] = list() |
|
| 604 | result['reporting_period']['timestamps'] = list() |
|
| 605 | result['reporting_period']['values'] = list() |
|
| 606 | result['reporting_period']['rates'] = list() |
|
| 607 | result['reporting_period']['subtotals'] = list() |
|
| 608 | result['reporting_period']['means'] = list() |
|
| 609 | result['reporting_period']['means_per_unit_area'] = list() |
|
| 610 | result['reporting_period']['means_increment_rate'] = list() |
|
| 611 | result['reporting_period']['medians'] = list() |
|
| 612 | result['reporting_period']['medians_per_unit_area'] = list() |
|
| 613 | result['reporting_period']['medians_increment_rate'] = list() |
|
| 614 | result['reporting_period']['minimums'] = list() |
|
| 615 | result['reporting_period']['minimums_per_unit_area'] = list() |
|
| 616 | result['reporting_period']['minimums_increment_rate'] = list() |
|
| 617 | result['reporting_period']['maximums'] = list() |
|
| 618 | result['reporting_period']['maximums_per_unit_area'] = list() |
|
| 619 | result['reporting_period']['maximums_increment_rate'] = list() |
|
| 620 | result['reporting_period']['stdevs'] = list() |
|
| 621 | result['reporting_period']['stdevs_per_unit_area'] = list() |
|
| 622 | result['reporting_period']['stdevs_increment_rate'] = list() |
|
| 623 | result['reporting_period']['variances'] = list() |
|
| 624 | result['reporting_period']['variances_per_unit_area'] = list() |
|
| 625 | result['reporting_period']['variances_increment_rate'] = list() |
|
| 626 | ||
| 627 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 628 | for energy_category_id in energy_category_set: |
|
| 629 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
| 630 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
| 631 | result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure']) |
|
| 632 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
| 633 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
| 634 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
| 635 | result['reporting_period']['means'].append(reporting[energy_category_id]['mean']) |
|
| 636 | result['reporting_period']['means_per_unit_area'].append( |
|
| 637 | reporting[energy_category_id]['mean'] / shopfloor['area'] |
|
| 638 | if reporting[energy_category_id]['mean'] is not None and |
|
| 639 | shopfloor['area'] is not None and |
|
| 640 | shopfloor['area'] > Decimal(0.0) |
|
| 641 | else None) |
|
| 642 | result['reporting_period']['means_increment_rate'].append( |
|
| 643 | (reporting[energy_category_id]['mean'] - base[energy_category_id]['mean']) / |
|
| 644 | base[energy_category_id]['mean'] if (base[energy_category_id]['mean'] is not None and |
|
| 645 | base[energy_category_id]['mean'] > Decimal(0.0)) |
|
| 646 | else None) |
|
| 647 | result['reporting_period']['medians'].append(reporting[energy_category_id]['median']) |
|
| 648 | result['reporting_period']['medians_per_unit_area'].append( |
|
| 649 | reporting[energy_category_id]['median'] / shopfloor['area'] |
|
| 650 | if reporting[energy_category_id]['median'] is not None and |
|
| 651 | shopfloor['area'] is not None and |
|
| 652 | shopfloor['area'] > Decimal(0.0) |
|
| 653 | else None) |
|
| 654 | result['reporting_period']['medians_increment_rate'].append( |
|
| 655 | (reporting[energy_category_id]['median'] - base[energy_category_id]['median']) / |
|
| 656 | base[energy_category_id]['median'] if (base[energy_category_id]['median'] is not None and |
|
| 657 | base[energy_category_id]['median'] > Decimal(0.0)) |
|
| 658 | else None) |
|
| 659 | result['reporting_period']['minimums'].append(reporting[energy_category_id]['minimum']) |
|
| 660 | result['reporting_period']['minimums_per_unit_area'].append( |
|
| 661 | reporting[energy_category_id]['minimum'] / shopfloor['area'] |
|
| 662 | if reporting[energy_category_id]['minimum'] is not None and |
|
| 663 | shopfloor['area'] is not None and |
|
| 664 | shopfloor['area'] > Decimal(0.0) |
|
| 665 | else None) |
|
| 666 | result['reporting_period']['minimums_increment_rate'].append( |
|
| 667 | (reporting[energy_category_id]['minimum'] - base[energy_category_id]['minimum']) / |
|
| 668 | base[energy_category_id]['minimum'] if (base[energy_category_id]['minimum'] is not None and |
|
| 669 | base[energy_category_id]['minimum'] > Decimal(0.0)) |
|
| 670 | else None) |
|
| 671 | result['reporting_period']['maximums'].append(reporting[energy_category_id]['maximum']) |
|
| 672 | result['reporting_period']['maximums_per_unit_area'].append( |
|
| 673 | reporting[energy_category_id]['maximum'] / shopfloor['area'] |
|
| 674 | if reporting[energy_category_id]['maximum'] is not None and |
|
| 675 | shopfloor['area'] is not None and |
|
| 676 | shopfloor['area'] > Decimal(0.0) |
|
| 677 | else None) |
|
| 678 | result['reporting_period']['maximums_increment_rate'].append( |
|
| 679 | (reporting[energy_category_id]['maximum'] - base[energy_category_id]['maximum']) / |
|
| 680 | base[energy_category_id]['maximum'] if (base[energy_category_id]['maximum'] is not None and |
|
| 681 | base[energy_category_id]['maximum'] > Decimal(0.0)) |
|
| 682 | else None) |
|
| 683 | result['reporting_period']['stdevs'].append(reporting[energy_category_id]['stdev']) |
|
| 684 | result['reporting_period']['stdevs_per_unit_area'].append( |
|
| 685 | reporting[energy_category_id]['stdev'] / shopfloor['area'] |
|
| 686 | if reporting[energy_category_id]['stdev'] is not None and |
|
| 687 | shopfloor['area'] is not None and |
|
| 688 | shopfloor['area'] > Decimal(0.0) |
|
| 689 | else None) |
|
| 690 | result['reporting_period']['stdevs_increment_rate'].append( |
|
| 691 | (reporting[energy_category_id]['stdev'] - base[energy_category_id]['stdev']) / |
|
| 692 | base[energy_category_id]['stdev'] if (base[energy_category_id]['stdev'] is not None and |
|
| 693 | base[energy_category_id]['stdev'] > Decimal(0.0)) |
|
| 694 | else None) |
|
| 695 | result['reporting_period']['variances'].append(reporting[energy_category_id]['variance']) |
|
| 696 | result['reporting_period']['variances_per_unit_area'].append( |
|
| 697 | reporting[energy_category_id]['variance'] / shopfloor['area'] |
|
| 698 | if reporting[energy_category_id]['variance'] is not None and |
|
| 699 | shopfloor['area'] is not None and |
|
| 700 | shopfloor['area'] > Decimal(0.0) |
|
| 701 | else None) |
|
| 702 | result['reporting_period']['variances_increment_rate'].append( |
|
| 703 | (reporting[energy_category_id]['variance'] - base[energy_category_id]['variance']) / |
|
| 704 | base[energy_category_id]['variance'] if (base[energy_category_id]['variance'] is not None and |
|
| 705 | base[energy_category_id]['variance'] > Decimal(0.0)) |
|
| 706 | else None) |
|
| 707 | ||
| 708 | rate = list() |
|
| 709 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
| 710 | if index < len(base[energy_category_id]['values']) \ |
|
| 711 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
| 712 | rate.append((value - base[energy_category_id]['values'][index]) |
|
| 713 | / base[energy_category_id]['values'][index]) |
|
| 714 | else: |
|
| 715 | rate.append(None) |
|
| 716 | result['reporting_period']['rates'].append(rate) |
|
| 717 | ||
| 718 | result['parameters'] = { |
|
| 719 | "names": parameters_data['names'], |
|
| 720 | "timestamps": parameters_data['timestamps'], |
|
| 721 | "values": parameters_data['values'] |
|
| 722 | } |
|
| 723 | ||
| 724 | # export the result to an Excel file and encode the file as a base64 string |
|
| 725 | result['excel_bytes_base64'] = None |
|
| 726 | if not is_quick_mode: |
|
| 727 | result['excel_bytes_base64'] = \ |
|
| 728 | excelexporters.shopfloorstatistics.export(result, |
|
| 729 | shopfloor['name'], |
|
| 730 | base_period_start_datetime_local, |
|
| 731 | base_period_end_datetime_local, |
|
| 732 | reporting_period_start_datetime_local, |
|
| 733 | reporting_period_end_datetime_local, |
|
| 734 | period_type, |
|
| 735 | language) |
|
| 736 | ||
| 737 | resp.text = json.dumps(result) |
|
| 738 | ||
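Both clones repeat the same `period_type`-dependent timestamp truncation four times (base and reporting periods, in both the shopfloor and store reports). Below is a minimal sketch of a helper that could absorb that repetition, assuming `period_type` has already passed the validation in Step 1; the function name `format_local_period_start` is hypothetical and not part of the quoted code.

```python
from datetime import datetime, timedelta, timezone


def format_local_period_start(start_datetime_utc: datetime,
                              timezone_offset: int,
                              period_type: str) -> str:
    """Convert a UTC period start to local time and truncate its ISO string.

    Mirrors the repeated if/elif chains above: hourly -> 'YYYY-MM-DDTHH:MM:SS',
    daily/weekly -> 'YYYY-MM-DD', monthly -> 'YYYY-MM', yearly -> 'YYYY'.
    Assumes period_type was already validated against the allowed values.
    """
    local_datetime = start_datetime_utc.replace(tzinfo=timezone.utc) + \
        timedelta(minutes=timezone_offset)
    slice_lengths = {'hourly': 19, 'daily': 10, 'weekly': 10, 'monthly': 7, 'yearly': 4}
    return local_datetime.isoformat()[0:slice_lengths[period_type]]


# Example usage inside the row loops of Steps 6 and 7:
#     current_datetime = format_local_period_start(row[0], timezone_offset, period_type)
```

Calling this once per fetched row in Steps 6 and 7 would remove one of the smaller duplicated fragments; the larger per-category statistics loop differs between the two files only in the entity name (shopfloor vs. store) and the hourly table it queries, so it is likewise a candidate for a shared, parameterized helper.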
| @@ 45-733 (lines=689) @@ | ||
| 42 | from core.useractivity import access_control, api_key_control |
|
| 43 | ||
| 44 | ||
| 45 | class Reporting: |
|
| 46 | def __init__(self): |
|
| 47 | """"Initializes Reporting""" |
|
| 48 | pass |
|
| 49 | ||
| 50 | @staticmethod |
|
| 51 | def on_options(req, resp): |
|
| 52 | _ = req |
|
| 53 | resp.status = falcon.HTTP_200 |
|
| 54 | ||
| 55 | #################################################################################################################### |
|
| 56 | # PROCEDURES |
|
| 57 | # Step 1: valid parameters |
|
| 58 | # Step 2: query the store |
|
| 59 | # Step 3: query energy categories |
|
| 60 | # Step 4: query associated sensors |
|
| 61 | # Step 5: query associated points |
|
| 62 | # Step 6: query base period energy input |
|
| 63 | # Step 7: query reporting period energy input |
|
| 64 | # Step 8: query tariff data |
|
| 65 | # Step 9: query associated sensors and points data |
|
| 66 | # Step 10: construct the report |
|
| 67 | #################################################################################################################### |
|
| 68 | @staticmethod |
|
| 69 | def on_get(req, resp): |
|
| 70 | if 'API-KEY' not in req.headers or \ |
|
| 71 | not isinstance(req.headers['API-KEY'], str) or \ |
|
| 72 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
| 73 | access_control(req) |
|
| 74 | else: |
|
| 75 | api_key_control(req) |
|
| 76 | print(req.params) |
|
| 77 | store_id = req.params.get('storeid') |
|
| 78 | store_uuid = req.params.get('storeuuid') |
|
| 79 | period_type = req.params.get('periodtype') |
|
| 80 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
| 81 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
| 82 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
| 83 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
| 84 | language = req.params.get('language') |
|
| 85 | quick_mode = req.params.get('quickmode') |
|
| 86 | ||
| 87 | ################################################################################################################ |
|
| 88 | # Step 1: validate parameters |
|
| 89 | ################################################################################################################ |
|
| 90 | if store_id is None and store_uuid is None: |
|
| 91 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 92 | title='API.BAD_REQUEST', |
|
| 93 | description='API.INVALID_STORE_ID') |
|
| 94 | ||
| 95 | if store_id is not None: |
|
| 96 | store_id = str.strip(store_id) |
|
| 97 | if not store_id.isdigit() or int(store_id) <= 0: |
|
| 98 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 99 | title='API.BAD_REQUEST', |
|
| 100 | description='API.INVALID_STORE_ID') |
|
| 101 | ||
| 102 | if store_uuid is not None: |
|
| 103 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
| 104 | match = regex.match(str.strip(store_uuid)) |
|
| 105 | if not bool(match): |
|
| 106 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 107 | title='API.BAD_REQUEST', |
|
| 108 | description='API.INVALID_STORE_UUID') |
|
| 109 | ||
| 110 | if period_type is None: |
|
| 111 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 112 | description='API.INVALID_PERIOD_TYPE') |
|
| 113 | else: |
|
| 114 | period_type = str.strip(period_type) |
|
| 115 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
| 116 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 117 | description='API.INVALID_PERIOD_TYPE') |
|
| 118 | ||
| 119 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
| 120 | if config.utc_offset[0] == '-': |
|
| 121 | timezone_offset = -timezone_offset |
|
| 122 | ||
| 123 | base_start_datetime_utc = None |
|
| 124 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
| 125 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
| 126 | try: |
|
| 127 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
| 128 | except ValueError: |
|
| 129 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 130 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
| 131 | base_start_datetime_utc = \ |
|
| 132 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 133 | # normalize the start datetime |
|
| 134 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
| 135 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
| 136 | else: |
|
| 137 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
| 138 | ||
| 139 | base_end_datetime_utc = None |
|
| 140 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
| 141 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
| 142 | try: |
|
| 143 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
| 144 | except ValueError: |
|
| 145 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 146 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
| 147 | base_end_datetime_utc = \ |
|
| 148 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 149 | ||
| 150 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
| 151 | base_start_datetime_utc >= base_end_datetime_utc: |
|
| 152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 153 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
| 154 | ||
| 155 | if reporting_period_start_datetime_local is None: |
|
| 156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 157 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
| 158 | else: |
|
| 159 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
| 160 | try: |
|
| 161 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
| 162 | '%Y-%m-%dT%H:%M:%S') |
|
| 163 | except ValueError: |
|
| 164 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 165 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
| 166 | reporting_start_datetime_utc = \ |
|
| 167 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 168 | # normalize the start datetime |
|
| 169 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
| 170 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
| 171 | else: |
|
| 172 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
| 173 | ||
| 174 | if reporting_period_end_datetime_local is None: |
|
| 175 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 176 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
| 177 | else: |
|
| 178 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
| 179 | try: |
|
| 180 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
| 181 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
| 182 | timedelta(minutes=timezone_offset) |
|
| 183 | except ValueError: |
|
| 184 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 185 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
| 186 | ||
| 187 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
| 188 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 189 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
| 190 | ||
| 191 | # if quick mode is turned on, do not return parameters data or the Excel file |
|
| 192 | is_quick_mode = False |
|
| 193 | if quick_mode is not None and \ |
|
| 194 | len(str.strip(quick_mode)) > 0 and \ |
|
| 195 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
| 196 | is_quick_mode = True |
|
| 197 | ||
| 198 | trans = utilities.get_translation(language) |
|
| 199 | trans.install() |
|
| 200 | _ = trans.gettext |
|
| 201 | ||
| 202 | ################################################################################################################ |
|
| 203 | # Step 2: query the store |
|
| 204 | ################################################################################################################ |
|
| 205 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
| 206 | cursor_system = cnx_system.cursor() |
|
| 207 | ||
| 208 | cnx_energy = mysql.connector.connect(**config.myems_energy_db) |
|
| 209 | cursor_energy = cnx_energy.cursor() |
|
| 210 | ||
| 211 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
| 212 | cursor_historical = cnx_historical.cursor() |
|
| 213 | ||
| 214 | if store_id is not None: |
|
| 215 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
| 216 | " FROM tbl_stores " |
|
| 217 | " WHERE id = %s ", (store_id,)) |
|
| 218 | row_store = cursor_system.fetchone() |
|
| 219 | elif store_uuid is not None: |
|
| 220 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
| 221 | " FROM tbl_stores " |
|
| 222 | " WHERE uuid = %s ", (store_uuid,)) |
|
| 223 | row_store = cursor_system.fetchone() |
|
| 224 | ||
| 225 | if row_store is None: |
|
| 226 | if cursor_system: |
|
| 227 | cursor_system.close() |
|
| 228 | if cnx_system: |
|
| 229 | cnx_system.close() |
|
| 230 | ||
| 231 | if cursor_energy: |
|
| 232 | cursor_energy.close() |
|
| 233 | if cnx_energy: |
|
| 234 | cnx_energy.close() |
|
| 235 | ||
| 236 | if cursor_historical: |
|
| 237 | cursor_historical.close() |
|
| 238 | if cnx_historical: |
|
| 239 | cnx_historical.close() |
|
| 240 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.STORE_NOT_FOUND') |
|
| 241 | ||
| 242 | store = dict() |
|
| 243 | store['id'] = row_store[0] |
|
| 244 | store['name'] = row_store[1] |
|
| 245 | store['area'] = row_store[2] |
|
| 246 | store['cost_center_id'] = row_store[3] |
|
| 247 | ||
| 248 | ################################################################################################################ |
|
| 249 | # Step 3: query energy categories |
|
| 250 | ################################################################################################################ |
|
| 251 | energy_category_set = set() |
|
| 252 | # query energy categories in base period |
|
| 253 | cursor_energy.execute(" SELECT DISTINCT(energy_category_id) " |
|
| 254 | " FROM tbl_store_input_category_hourly " |
|
| 255 | " WHERE store_id = %s " |
|
| 256 | " AND start_datetime_utc >= %s " |
|
| 257 | " AND start_datetime_utc < %s ", |
|
| 258 | (store['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
| 259 | rows_energy_categories = cursor_energy.fetchall() |
|
| 260 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
| 261 | for row_energy_category in rows_energy_categories: |
|
| 262 | energy_category_set.add(row_energy_category[0]) |
|
| 263 | ||
| 264 | # query energy categories in reporting period |
|
| 265 | cursor_energy.execute(" SELECT DISTINCT(energy_category_id) " |
|
| 266 | " FROM tbl_store_input_category_hourly " |
|
| 267 | " WHERE store_id = %s " |
|
| 268 | " AND start_datetime_utc >= %s " |
|
| 269 | " AND start_datetime_utc < %s ", |
|
| 270 | (store['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
| 271 | rows_energy_categories = cursor_energy.fetchall() |
|
| 272 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
| 273 | for row_energy_category in rows_energy_categories: |
|
| 274 | energy_category_set.add(row_energy_category[0]) |
|
| 275 | ||
| 276 | # query all energy categories in base period and reporting period |
|
| 277 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
| 278 | " FROM tbl_energy_categories " |
|
| 279 | " ORDER BY id ", ) |
|
| 280 | rows_energy_categories = cursor_system.fetchall() |
|
| 281 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
| 282 | if cursor_system: |
|
| 283 | cursor_system.close() |
|
| 284 | if cnx_system: |
|
| 285 | cnx_system.close() |
|
| 286 | ||
| 287 | if cursor_energy: |
|
| 288 | cursor_energy.close() |
|
| 289 | if cnx_energy: |
|
| 290 | cnx_energy.close() |
|
| 291 | ||
| 292 | if cursor_historical: |
|
| 293 | cursor_historical.close() |
|
| 294 | if cnx_historical: |
|
| 295 | cnx_historical.close() |
|
| 296 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
| 297 | title='API.NOT_FOUND', |
|
| 298 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
| 299 | energy_category_dict = dict() |
|
| 300 | for row_energy_category in rows_energy_categories: |
|
| 301 | if row_energy_category[0] in energy_category_set: |
|
| 302 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
| 303 | "unit_of_measure": row_energy_category[2], |
|
| 304 | "kgce": row_energy_category[3], |
|
| 305 | "kgco2e": row_energy_category[4]} |
|
| 306 | ||
| 307 | ################################################################################################################ |
|
| 308 | # Step 4: query associated sensors |
|
| 309 | ################################################################################################################ |
|
| 310 | point_list = list() |
|
| 311 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
| 312 | " FROM tbl_stores st, tbl_sensors se, tbl_stores_sensors ss, " |
|
| 313 | " tbl_points p, tbl_sensors_points sp " |
|
| 314 | " WHERE st.id = %s AND st.id = ss.store_id AND ss.sensor_id = se.id " |
|
| 315 | " AND se.id = sp.sensor_id AND sp.point_id = p.id " |
|
| 316 | " ORDER BY p.id ", (store['id'],)) |
|
| 317 | rows_points = cursor_system.fetchall() |
|
| 318 | if rows_points is not None and len(rows_points) > 0: |
|
| 319 | for row in rows_points: |
|
| 320 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
| 321 | ||
| 322 | ################################################################################################################ |
|
| 323 | # Step 5: query associated points |
|
| 324 | ################################################################################################################ |
|
| 325 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
| 326 | " FROM tbl_stores s, tbl_stores_points sp, tbl_points p " |
|
| 327 | " WHERE s.id = %s AND s.id = sp.store_id AND sp.point_id = p.id " |
|
| 328 | " ORDER BY p.id ", (store['id'],)) |
|
| 329 | rows_points = cursor_system.fetchall() |
|
| 330 | if rows_points is not None and len(rows_points) > 0: |
|
| 331 | for row in rows_points: |
|
| 332 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
| 333 | ||
| 334 | ################################################################################################################ |
|
| 335 | # Step 6: query base period energy input |
|
| 336 | ################################################################################################################ |
|
| 337 | base = dict() |
|
| 338 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 339 | for energy_category_id in energy_category_set: |
|
| 340 | base[energy_category_id] = dict() |
|
| 341 | base[energy_category_id]['timestamps'] = list() |
|
| 342 | base[energy_category_id]['values'] = list() |
|
| 343 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
| 344 | base[energy_category_id]['mean'] = None |
|
| 345 | base[energy_category_id]['median'] = None |
|
| 346 | base[energy_category_id]['minimum'] = None |
|
| 347 | base[energy_category_id]['maximum'] = None |
|
| 348 | base[energy_category_id]['stdev'] = None |
|
| 349 | base[energy_category_id]['variance'] = None |
|
| 350 | ||
| 351 | cursor_energy.execute(" SELECT start_datetime_utc, actual_value " |
|
| 352 | " FROM tbl_store_input_category_hourly " |
|
| 353 | " WHERE store_id = %s " |
|
| 354 | " AND energy_category_id = %s " |
|
| 355 | " AND start_datetime_utc >= %s " |
|
| 356 | " AND start_datetime_utc < %s " |
|
| 357 | " ORDER BY start_datetime_utc ", |
|
| 358 | (store['id'], |
|
| 359 | energy_category_id, |
|
| 360 | base_start_datetime_utc, |
|
| 361 | base_end_datetime_utc)) |
|
| 362 | rows_store_hourly = cursor_energy.fetchall() |
|
| 363 | ||
| 364 | rows_store_periodically, \ |
|
| 365 | base[energy_category_id]['mean'], \ |
|
| 366 | base[energy_category_id]['median'], \ |
|
| 367 | base[energy_category_id]['minimum'], \ |
|
| 368 | base[energy_category_id]['maximum'], \ |
|
| 369 | base[energy_category_id]['stdev'], \ |
|
| 370 | base[energy_category_id]['variance'] = \ |
|
| 371 | utilities.statistics_hourly_data_by_period(rows_store_hourly, |
|
| 372 | base_start_datetime_utc, |
|
| 373 | base_end_datetime_utc, |
|
| 374 | period_type) |
|
| 375 | ||
| 376 | for row_store_periodically in rows_store_periodically: |
|
| 377 | current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
| 378 | timedelta(minutes=timezone_offset) |
|
| 379 | if period_type == 'hourly': |
|
| 380 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 381 | elif period_type == 'daily': |
|
| 382 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 383 | elif period_type == 'weekly': |
|
| 384 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 385 | elif period_type == 'monthly': |
|
| 386 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
| 387 | elif period_type == 'yearly': |
|
| 388 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
| 389 | ||
| 390 | actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1] |
|
| 391 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
| 392 | base[energy_category_id]['values'].append(actual_value) |
|
| 393 | base[energy_category_id]['subtotal'] += actual_value |
|
| 394 | ||
| 395 | ################################################################################################################ |
|
| 396 | # Step 7: query reporting period energy input |
|
| 397 | ################################################################################################################ |
|
| 398 | reporting = dict() |
|
| 399 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 400 | for energy_category_id in energy_category_set: |
|
| 401 | reporting[energy_category_id] = dict() |
|
| 402 | reporting[energy_category_id]['timestamps'] = list() |
|
| 403 | reporting[energy_category_id]['values'] = list() |
|
| 404 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
| 405 | reporting[energy_category_id]['mean'] = None |
|
| 406 | reporting[energy_category_id]['median'] = None |
|
| 407 | reporting[energy_category_id]['minimum'] = None |
|
| 408 | reporting[energy_category_id]['maximum'] = None |
|
| 409 | reporting[energy_category_id]['stdev'] = None |
|
| 410 | reporting[energy_category_id]['variance'] = None |
|
| 411 | ||
| 412 | cursor_energy.execute(" SELECT start_datetime_utc, actual_value " |
|
| 413 | " FROM tbl_store_input_category_hourly " |
|
| 414 | " WHERE store_id = %s " |
|
| 415 | " AND energy_category_id = %s " |
|
| 416 | " AND start_datetime_utc >= %s " |
|
| 417 | " AND start_datetime_utc < %s " |
|
| 418 | " ORDER BY start_datetime_utc ", |
|
| 419 | (store['id'], |
|
| 420 | energy_category_id, |
|
| 421 | reporting_start_datetime_utc, |
|
| 422 | reporting_end_datetime_utc)) |
|
| 423 | rows_store_hourly = cursor_energy.fetchall() |
|
| 424 | ||
| 425 | rows_store_periodically, \ |
|
| 426 | reporting[energy_category_id]['mean'], \ |
|
| 427 | reporting[energy_category_id]['median'], \ |
|
| 428 | reporting[energy_category_id]['minimum'], \ |
|
| 429 | reporting[energy_category_id]['maximum'], \ |
|
| 430 | reporting[energy_category_id]['stdev'], \ |
|
| 431 | reporting[energy_category_id]['variance'] = \ |
|
| 432 | utilities.statistics_hourly_data_by_period(rows_store_hourly, |
|
| 433 | reporting_start_datetime_utc, |
|
| 434 | reporting_end_datetime_utc, |
|
| 435 | period_type) |
|
| 436 | ||
| 437 | for row_store_periodically in rows_store_periodically: |
|
| 438 | current_datetime_local = row_store_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
| 439 | timedelta(minutes=timezone_offset) |
|
| 440 | if period_type == 'hourly': |
|
| 441 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 442 | elif period_type == 'daily': |
|
| 443 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 444 | elif period_type == 'weekly': |
|
| 445 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 446 | elif period_type == 'monthly': |
|
| 447 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
| 448 | elif period_type == 'yearly': |
|
| 449 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
| 450 | ||
| 451 | actual_value = Decimal(0.0) if row_store_periodically[1] is None else row_store_periodically[1] |
|
| 452 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
| 453 | reporting[energy_category_id]['values'].append(actual_value) |
|
| 454 | reporting[energy_category_id]['subtotal'] += actual_value |
|
| 455 | ||
| 456 | ################################################################################################################ |
|
| 457 | # Step 8: query tariff data |
|
| 458 | ################################################################################################################ |
|
| 459 | parameters_data = dict() |
|
| 460 | parameters_data['names'] = list() |
|
| 461 | parameters_data['timestamps'] = list() |
|
| 462 | parameters_data['values'] = list() |
|
| 463 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
| 464 | and not is_quick_mode: |
|
| 465 | for energy_category_id in energy_category_set: |
|
| 466 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(store['cost_center_id'], |
|
| 467 | energy_category_id, |
|
| 468 | reporting_start_datetime_utc, |
|
| 469 | reporting_end_datetime_utc) |
|
| 470 | tariff_timestamp_list = list() |
|
| 471 | tariff_value_list = list() |
|
| 472 | for k, v in energy_category_tariff_dict.items(): |
|
| 473 | # convert k from utc to local |
|
| 474 | k = k + timedelta(minutes=timezone_offset) |
|
| 475 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
| 476 | tariff_value_list.append(v) |
|
| 477 | ||
| 478 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
| 479 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
| 480 | parameters_data['values'].append(tariff_value_list) |
|
| 481 | ||
| 482 | ################################################################################################################ |
|
| 483 | # Step 9: query associated sensors and points data |
|
| 484 | ################################################################################################################ |
|
| 485 | if not is_quick_mode: |
|
| 486 | for point in point_list: |
|
| 487 | point_values = [] |
|
| 488 | point_timestamps = [] |
|
| 489 | if point['object_type'] == 'ENERGY_VALUE': |
|
| 490 | query = (" SELECT utc_date_time, actual_value " |
|
| 491 | " FROM tbl_energy_value " |
|
| 492 | " WHERE point_id = %s " |
|
| 493 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 494 | " ORDER BY utc_date_time ") |
|
| 495 | cursor_historical.execute(query, (point['id'], |
|
| 496 | reporting_start_datetime_utc, |
|
| 497 | reporting_end_datetime_utc)) |
|
| 498 | rows = cursor_historical.fetchall() |
|
| 499 | ||
| 500 | if rows is not None and len(rows) > 0: |
|
| 501 | for row in rows: |
|
| 502 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 503 | timedelta(minutes=timezone_offset) |
|
| 504 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 505 | point_timestamps.append(current_datetime) |
|
| 506 | point_values.append(row[1]) |
|
| 507 | elif point['object_type'] == 'ANALOG_VALUE': |
|
| 508 | query = (" SELECT utc_date_time, actual_value " |
|
| 509 | " FROM tbl_analog_value " |
|
| 510 | " WHERE point_id = %s " |
|
| 511 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 512 | " ORDER BY utc_date_time ") |
|
| 513 | cursor_historical.execute(query, (point['id'], |
|
| 514 | reporting_start_datetime_utc, |
|
| 515 | reporting_end_datetime_utc)) |
|
| 516 | rows = cursor_historical.fetchall() |
|
| 517 | ||
| 518 | if rows is not None and len(rows) > 0: |
|
| 519 | for row in rows: |
|
| 520 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 521 | timedelta(minutes=timezone_offset) |
|
| 522 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 523 | point_timestamps.append(current_datetime) |
|
| 524 | point_values.append(row[1]) |
|
| 525 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
| 526 | query = (" SELECT utc_date_time, actual_value " |
|
| 527 | " FROM tbl_digital_value " |
|
| 528 | " WHERE point_id = %s " |
|
| 529 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 530 | " ORDER BY utc_date_time ") |
|
| 531 | cursor_historical.execute(query, (point['id'], |
|
| 532 | reporting_start_datetime_utc, |
|
| 533 | reporting_end_datetime_utc)) |
|
| 534 | rows = cursor_historical.fetchall() |
|
| 535 | ||
| 536 | if rows is not None and len(rows) > 0: |
|
| 537 | for row in rows: |
|
| 538 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 539 | timedelta(minutes=timezone_offset) |
|
| 540 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 541 | point_timestamps.append(current_datetime) |
|
| 542 | point_values.append(row[1]) |
|
| 543 | ||
| 544 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
| 545 | parameters_data['timestamps'].append(point_timestamps) |
|
| 546 | parameters_data['values'].append(point_values) |
|
| 547 | ||
| 548 | ################################################################################################################ |
|
| 549 | # Step 10: construct the report |
|
| 550 | ################################################################################################################ |
|
| 551 | if cursor_system: |
|
| 552 | cursor_system.close() |
|
| 553 | if cnx_system: |
|
| 554 | cnx_system.close() |
|
| 555 | ||
| 556 | if cursor_energy: |
|
| 557 | cursor_energy.close() |
|
| 558 | if cnx_energy: |
|
| 559 | cnx_energy.close() |
|
| 560 | ||
| 561 | if cursor_historical: |
|
| 562 | cursor_historical.close() |
|
| 563 | if cnx_historical: |
|
| 564 | cnx_historical.close() |
|
| 565 | ||
| 566 | result = dict() |
|
| 567 | ||
| 568 | result['store'] = dict() |
|
| 569 | result['store']['name'] = store['name'] |
|
| 570 | result['store']['area'] = store['area'] |
|
| 571 | ||
| 572 | result['base_period'] = dict() |
|
| 573 | result['base_period']['names'] = list() |
|
| 574 | result['base_period']['units'] = list() |
|
| 575 | result['base_period']['timestamps'] = list() |
|
| 576 | result['base_period']['values'] = list() |
|
| 577 | result['base_period']['subtotals'] = list() |
|
| 578 | result['base_period']['means'] = list() |
|
| 579 | result['base_period']['medians'] = list() |
|
| 580 | result['base_period']['minimums'] = list() |
|
| 581 | result['base_period']['maximums'] = list() |
|
| 582 | result['base_period']['stdevs'] = list() |
|
| 583 | result['base_period']['variances'] = list() |
|
| 584 | ||
| 585 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 586 | for energy_category_id in energy_category_set: |
|
| 587 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
| 588 | result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure']) |
|
| 589 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
| 590 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
| 591 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
| 592 | result['base_period']['means'].append(base[energy_category_id]['mean']) |
|
| 593 | result['base_period']['medians'].append(base[energy_category_id]['median']) |
|
| 594 | result['base_period']['minimums'].append(base[energy_category_id]['minimum']) |
|
| 595 | result['base_period']['maximums'].append(base[energy_category_id]['maximum']) |
|
| 596 | result['base_period']['stdevs'].append(base[energy_category_id]['stdev']) |
|
| 597 | result['base_period']['variances'].append(base[energy_category_id]['variance']) |
|
| 598 | ||
| 599 | result['reporting_period'] = dict() |
|
| 600 | result['reporting_period']['names'] = list() |
|
| 601 | result['reporting_period']['energy_category_ids'] = list() |
|
| 602 | result['reporting_period']['units'] = list() |
|
| 603 | result['reporting_period']['timestamps'] = list() |
|
| 604 | result['reporting_period']['values'] = list() |
|
| 605 | result['reporting_period']['rates'] = list() |
|
| 606 | result['reporting_period']['subtotals'] = list() |
|
| 607 | result['reporting_period']['means'] = list() |
|
| 608 | result['reporting_period']['means_per_unit_area'] = list() |
|
| 609 | result['reporting_period']['means_increment_rate'] = list() |
|
| 610 | result['reporting_period']['medians'] = list() |
|
| 611 | result['reporting_period']['medians_per_unit_area'] = list() |
|
| 612 | result['reporting_period']['medians_increment_rate'] = list() |
|
| 613 | result['reporting_period']['minimums'] = list() |
|
| 614 | result['reporting_period']['minimums_per_unit_area'] = list() |
|
| 615 | result['reporting_period']['minimums_increment_rate'] = list() |
|
| 616 | result['reporting_period']['maximums'] = list() |
|
| 617 | result['reporting_period']['maximums_per_unit_area'] = list() |
|
| 618 | result['reporting_period']['maximums_increment_rate'] = list() |
|
| 619 | result['reporting_period']['stdevs'] = list() |
|
| 620 | result['reporting_period']['stdevs_per_unit_area'] = list() |
|
| 621 | result['reporting_period']['stdevs_increment_rate'] = list() |
|
| 622 | result['reporting_period']['variances'] = list() |
|
| 623 | result['reporting_period']['variances_per_unit_area'] = list() |
|
| 624 | result['reporting_period']['variances_increment_rate'] = list() |
|
| 625 | ||
| 626 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 627 | for energy_category_id in energy_category_set: |
|
| 628 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
| 629 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
| 630 | result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure']) |
|
| 631 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
| 632 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
| 633 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
| 634 | result['reporting_period']['means'].append(reporting[energy_category_id]['mean']) |
|
| 635 | result['reporting_period']['means_per_unit_area'].append( |
|
| 636 | reporting[energy_category_id]['mean'] / store['area'] |
|
| 637 | if reporting[energy_category_id]['mean'] is not None and |
|
| 638 | store['area'] is not None and |
|
| 639 | store['area'] > Decimal(0.0) |
|
| 640 | else None) |
|
| 641 | result['reporting_period']['means_increment_rate'].append( |
|
| 642 | (reporting[energy_category_id]['mean'] - base[energy_category_id]['mean']) / |
|
| 643 | base[energy_category_id]['mean'] if (base[energy_category_id]['mean'] is not None and |
|
| 644 | base[energy_category_id]['mean'] > Decimal(0.0)) |
|
| 645 | else None) |
|
| 646 | result['reporting_period']['medians'].append(reporting[energy_category_id]['median']) |
|
| 647 | result['reporting_period']['medians_per_unit_area'].append( |
|
| 648 | reporting[energy_category_id]['median'] / store['area'] |
|
| 649 | if reporting[energy_category_id]['median'] is not None and |
|
| 650 | store['area'] is not None and |
|
| 651 | store['area'] > Decimal(0.0) |
|
| 652 | else None) |
|
| 653 | result['reporting_period']['medians_increment_rate'].append( |
|
| 654 | (reporting[energy_category_id]['median'] - base[energy_category_id]['median']) / |
|
| 655 | base[energy_category_id]['median'] if (base[energy_category_id]['median'] is not None and |
|
| 656 | base[energy_category_id]['median'] > Decimal(0.0)) |
|
| 657 | else None) |
|
| 658 | result['reporting_period']['minimums'].append(reporting[energy_category_id]['minimum']) |
|
| 659 | result['reporting_period']['minimums_per_unit_area'].append( |
|
| 660 | reporting[energy_category_id]['minimum'] / store['area'] |
|
| 661 | if reporting[energy_category_id]['minimum'] is not None and |
|
| 662 | store['area'] is not None and |
|
| 663 | store['area'] > Decimal(0.0) |
|
| 664 | else None) |
|
| 665 | result['reporting_period']['minimums_increment_rate'].append( |
|
| 666 | (reporting[energy_category_id]['minimum'] - base[energy_category_id]['minimum']) / |
|
| 667 | base[energy_category_id]['minimum'] if (base[energy_category_id]['minimum'] is not None and |
|
| 668 | base[energy_category_id]['minimum'] > Decimal(0.0)) |
|
| 669 | else None) |
|
| 670 | result['reporting_period']['maximums'].append(reporting[energy_category_id]['maximum']) |
|
| 671 | result['reporting_period']['maximums_per_unit_area'].append( |
|
| 672 | reporting[energy_category_id]['maximum'] / store['area'] |
|
| 673 | if reporting[energy_category_id]['maximum'] is not None and |
|
| 674 | store['area'] is not None and |
|
| 675 | store['area'] > Decimal(0.0) |
|
| 676 | else None) |
|
| 677 | result['reporting_period']['maximums_increment_rate'].append( |
|
| 678 | (reporting[energy_category_id]['maximum'] - base[energy_category_id]['maximum']) / |
|
| 679 | base[energy_category_id]['maximum'] if (base[energy_category_id]['maximum'] is not None and |
|
| 680 | base[energy_category_id]['maximum'] > Decimal(0.0)) |
|
| 681 | else None) |
|
| 682 | result['reporting_period']['stdevs'].append(reporting[energy_category_id]['stdev']) |
|
| 683 | result['reporting_period']['stdevs_per_unit_area'].append( |
|
| 684 | reporting[energy_category_id]['stdev'] / store['area'] |
|
| 685 | if reporting[energy_category_id]['stdev'] is not None and |
|
| 686 | store['area'] is not None and |
|
| 687 | store['area'] > Decimal(0.0) |
|
| 688 | else None) |
|
| 689 | result['reporting_period']['stdevs_increment_rate'].append( |
|
| 690 | (reporting[energy_category_id]['stdev'] - base[energy_category_id]['stdev']) / |
|
| 691 | base[energy_category_id]['stdev'] if (base[energy_category_id]['stdev'] is not None and |
|
| 692 | base[energy_category_id]['stdev'] > Decimal(0.0)) |
|
| 693 | else None) |
|
| 694 | result['reporting_period']['variances'].append(reporting[energy_category_id]['variance']) |
|
| 695 | result['reporting_period']['variances_per_unit_area'].append( |
|
| 696 | reporting[energy_category_id]['variance'] / store['area'] |
|
| 697 | if reporting[energy_category_id]['variance'] is not None and |
|
| 698 | store['area'] is not None and |
|
| 699 | store['area'] > Decimal(0.0) |
|
| 700 | else None) |
|
| 701 | result['reporting_period']['variances_increment_rate'].append( |
|
| 702 | (reporting[energy_category_id]['variance'] - base[energy_category_id]['variance']) / |
|
| 703 | base[energy_category_id]['variance'] if (base[energy_category_id]['variance'] is not None and |
|
| 704 | base[energy_category_id]['variance'] > Decimal(0.0)) |
|
| 705 | else None) |
|
| 706 | ||
| 707 | rate = list() |
|
| 708 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
| 709 | if index < len(base[energy_category_id]['values']) \ |
|
| 710 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
| 711 | rate.append((value - base[energy_category_id]['values'][index]) |
|
| 712 | / base[energy_category_id]['values'][index]) |
|
| 713 | else: |
|
| 714 | rate.append(None) |
|
| 715 | result['reporting_period']['rates'].append(rate) |
|
| 716 | ||
| 717 | result['parameters'] = { |
|
| 718 | "names": parameters_data['names'], |
|
| 719 | "timestamps": parameters_data['timestamps'], |
|
| 720 | "values": parameters_data['values'] |
|
| 721 | } |
|
| 722 | ||
| 723 | # export result to Excel file and then encode the file to base64 string |
|
| 724 | if not is_quick_mode: |
|
| 725 | result['excel_bytes_base64'] = excelexporters.storestatistics.export(result, |
|
| 726 | store['name'], |
|
| 727 | base_period_start_datetime_local, |
|
| 728 | base_period_end_datetime_local, |
|
| 729 | reporting_period_start_datetime_local, |
|
| 730 | reporting_period_end_datetime_local, |
|
| 731 | period_type, |
|
| 732 | language) |
|
| 733 | resp.text = json.dumps(result) |
|
| 734 | ||
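The excerpt above is from the store statistics report (it reads `tbl_store_input_category_hourly` and calls `excelexporters.storestatistics.export`); the excerpt below repeats the same Steps 1 through 10 in the tenant statistics report. Most of Step 10 in both copies is the same guarded arithmetic: divide a statistic by the entity's area only when both operands are usable, and compute an increment rate against the base period only when the base value is positive. A minimal sketch of that pattern as two helpers; the names `per_unit_area` and `increment_rate` are illustrative and not part of the report code:

```python
from decimal import Decimal


def per_unit_area(value, area):
    """Return value / area, or None when either operand is missing or area is not positive.

    Mirrors the inline conditionals that build the *_per_unit_area lists in Step 10.
    """
    if value is None or area is None or area <= Decimal(0):
        return None
    return value / area


def increment_rate(reporting_value, base_value):
    """Return (reporting_value - base_value) / base_value, or None when the base value
    is missing or not positive.

    Mirrors the *_increment_rate conditionals in Step 10; unlike the inline version it
    also guards reporting_value, which the original assumes is not None whenever the
    base value is positive.
    """
    if reporting_value is None or base_value is None or base_value <= Decimal(0):
        return None
    return (reporting_value - base_value) / base_value
```

With helpers like these, each statistic in Step 10 reduces to one call, e.g. `result['reporting_period']['means_per_unit_area'].append(per_unit_area(reporting[energy_category_id]['mean'], store['area']))`.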
| @@ 45-733 (lines=689) @@ | ||
| 42 | from core.useractivity import access_control, api_key_control |
|
| 43 | ||
| 44 | ||
| 45 | class Reporting: |
|
| 46 | def __init__(self): |
|
| 47 | """Initializes Reporting""" |
|
| 48 | pass |
|
| 49 | ||
| 50 | @staticmethod |
|
| 51 | def on_options(req, resp): |
|
| 52 | _ = req |
|
| 53 | resp.status = falcon.HTTP_200 |
|
| 54 | ||
| 55 | #################################################################################################################### |
|
| 56 | # PROCEDURES |
|
| 57 | # Step 1: valid parameters |
|
| 58 | # Step 2: query the tenant |
|
| 59 | # Step 3: query energy categories |
|
| 60 | # Step 4: query associated sensors |
|
| 61 | # Step 5: query associated points |
|
| 62 | # Step 6: query base period energy input |
|
| 63 | # Step 7: query reporting period energy input |
|
| 64 | # Step 8: query tariff data |
|
| 65 | # Step 9: query associated sensors and points data |
|
| 66 | # Step 10: construct the report |
|
| 67 | #################################################################################################################### |
|
| 68 | @staticmethod |
|
| 69 | def on_get(req, resp): |
|
| 70 | if 'API-KEY' not in req.headers or \ |
|
| 71 | not isinstance(req.headers['API-KEY'], str) or \ |
|
| 72 | len(str.strip(req.headers['API-KEY'])) == 0: |
|
| 73 | access_control(req) |
|
| 74 | else: |
|
| 75 | api_key_control(req) |
|
| 76 | print(req.params) |
|
| 77 | tenant_id = req.params.get('tenantid') |
|
| 78 | tenant_uuid = req.params.get('tenantuuid') |
|
| 79 | period_type = req.params.get('periodtype') |
|
| 80 | base_period_start_datetime_local = req.params.get('baseperiodstartdatetime') |
|
| 81 | base_period_end_datetime_local = req.params.get('baseperiodenddatetime') |
|
| 82 | reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime') |
|
| 83 | reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime') |
|
| 84 | language = req.params.get('language') |
|
| 85 | quick_mode = req.params.get('quickmode') |
|
| 86 | ||
| 87 | ################################################################################################################ |
|
| 88 | # Step 1: valid parameters |
|
| 89 | ################################################################################################################ |
|
| 90 | if tenant_id is None and tenant_uuid is None: |
|
| 91 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 92 | title='API.BAD_REQUEST', |
|
| 93 | description='API.INVALID_TENANT_ID') |
|
| 94 | ||
| 95 | if tenant_id is not None: |
|
| 96 | tenant_id = str.strip(tenant_id) |
|
| 97 | if not tenant_id.isdigit() or int(tenant_id) <= 0: |
|
| 98 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 99 | title='API.BAD_REQUEST', |
|
| 100 | description='API.INVALID_TENANT_ID') |
|
| 101 | ||
| 102 | if tenant_uuid is not None: |
|
| 103 | regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I) |
|
| 104 | match = regex.match(str.strip(tenant_uuid)) |
|
| 105 | if not bool(match): |
|
| 106 | raise falcon.HTTPError(status=falcon.HTTP_400, |
|
| 107 | title='API.BAD_REQUEST', |
|
| 108 | description='API.INVALID_TENANT_UUID') |
|
| 109 | ||
| 110 | if period_type is None: |
|
| 111 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 112 | description='API.INVALID_PERIOD_TYPE') |
|
| 113 | else: |
|
| 114 | period_type = str.strip(period_type) |
|
| 115 | if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']: |
|
| 116 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 117 | description='API.INVALID_PERIOD_TYPE') |
|
| 118 | ||
| 119 | timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6]) |
|
| 120 | if config.utc_offset[0] == '-': |
|
| 121 | timezone_offset = -timezone_offset |
|
| 122 | ||
| 123 | base_start_datetime_utc = None |
|
| 124 | if base_period_start_datetime_local is not None and len(str.strip(base_period_start_datetime_local)) > 0: |
|
| 125 | base_period_start_datetime_local = str.strip(base_period_start_datetime_local) |
|
| 126 | try: |
|
| 127 | base_start_datetime_utc = datetime.strptime(base_period_start_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
| 128 | except ValueError: |
|
| 129 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 130 | description="API.INVALID_BASE_PERIOD_START_DATETIME") |
|
| 131 | base_start_datetime_utc = \ |
|
| 132 | base_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 133 | # normalize the start datetime |
|
| 134 | if config.minutes_to_count == 30 and base_start_datetime_utc.minute >= 30: |
|
| 135 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
| 136 | else: |
|
| 137 | base_start_datetime_utc = base_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
| 138 | ||
| 139 | base_end_datetime_utc = None |
|
| 140 | if base_period_end_datetime_local is not None and len(str.strip(base_period_end_datetime_local)) > 0: |
|
| 141 | base_period_end_datetime_local = str.strip(base_period_end_datetime_local) |
|
| 142 | try: |
|
| 143 | base_end_datetime_utc = datetime.strptime(base_period_end_datetime_local, '%Y-%m-%dT%H:%M:%S') |
|
| 144 | except ValueError: |
|
| 145 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 146 | description="API.INVALID_BASE_PERIOD_END_DATETIME") |
|
| 147 | base_end_datetime_utc = \ |
|
| 148 | base_end_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 149 | ||
| 150 | if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \ |
|
| 151 | base_start_datetime_utc >= base_end_datetime_utc: |
|
| 152 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 153 | description='API.INVALID_BASE_PERIOD_END_DATETIME') |
|
| 154 | ||
| 155 | if reporting_period_start_datetime_local is None: |
|
| 156 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 157 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
| 158 | else: |
|
| 159 | reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local) |
|
| 160 | try: |
|
| 161 | reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local, |
|
| 162 | '%Y-%m-%dT%H:%M:%S') |
|
| 163 | except ValueError: |
|
| 164 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 165 | description="API.INVALID_REPORTING_PERIOD_START_DATETIME") |
|
| 166 | reporting_start_datetime_utc = \ |
|
| 167 | reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset) |
|
| 168 | # normalize the start datetime |
|
| 169 | if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30: |
|
| 170 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0) |
|
| 171 | else: |
|
| 172 | reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0) |
|
| 173 | ||
| 174 | if reporting_period_end_datetime_local is None: |
|
| 175 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 176 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
| 177 | else: |
|
| 178 | reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local) |
|
| 179 | try: |
|
| 180 | reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local, |
|
| 181 | '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \ |
|
| 182 | timedelta(minutes=timezone_offset) |
|
| 183 | except ValueError: |
|
| 184 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 185 | description="API.INVALID_REPORTING_PERIOD_END_DATETIME") |
|
| 186 | ||
| 187 | if reporting_start_datetime_utc >= reporting_end_datetime_utc: |
|
| 188 | raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST', |
|
| 189 | description='API.INVALID_REPORTING_PERIOD_END_DATETIME') |
|
| 190 | ||
| 191 | # if quick mode is turned on, do not return parameters data or the Excel file |
|
| 192 | is_quick_mode = False |
|
| 193 | if quick_mode is not None and \ |
|
| 194 | len(str.strip(quick_mode)) > 0 and \ |
|
| 195 | str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'): |
|
| 196 | is_quick_mode = True |
|
| 197 | ||
| 198 | trans = utilities.get_translation(language) |
|
| 199 | trans.install() |
|
| 200 | _ = trans.gettext |
|
| 201 | ||
| 202 | ################################################################################################################ |
|
| 203 | # Step 2: query the tenant |
|
| 204 | ################################################################################################################ |
|
| 205 | cnx_system = mysql.connector.connect(**config.myems_system_db) |
|
| 206 | cursor_system = cnx_system.cursor() |
|
| 207 | ||
| 208 | cnx_energy = mysql.connector.connect(**config.myems_energy_db) |
|
| 209 | cursor_energy = cnx_energy.cursor() |
|
| 210 | ||
| 211 | cnx_historical = mysql.connector.connect(**config.myems_historical_db) |
|
| 212 | cursor_historical = cnx_historical.cursor() |
|
| 213 | ||
| 214 | if tenant_id is not None: |
|
| 215 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
| 216 | " FROM tbl_tenants " |
|
| 217 | " WHERE id = %s ", (tenant_id,)) |
|
| 218 | row_tenant = cursor_system.fetchone() |
|
| 219 | elif tenant_uuid is not None: |
|
| 220 | cursor_system.execute(" SELECT id, name, area, cost_center_id " |
|
| 221 | " FROM tbl_tenants " |
|
| 222 | " WHERE uuid = %s ", (tenant_uuid,)) |
|
| 223 | row_tenant = cursor_system.fetchone() |
|
| 224 | ||
| 225 | if row_tenant is None: |
|
| 226 | if cursor_system: |
|
| 227 | cursor_system.close() |
|
| 228 | if cnx_system: |
|
| 229 | cnx_system.close() |
|
| 230 | ||
| 231 | if cursor_energy: |
|
| 232 | cursor_energy.close() |
|
| 233 | if cnx_energy: |
|
| 234 | cnx_energy.close() |
|
| 235 | ||
| 236 | if cursor_historical: |
|
| 237 | cursor_historical.close() |
|
| 238 | if cnx_historical: |
|
| 239 | cnx_historical.close() |
|
| 240 | raise falcon.HTTPError(status=falcon.HTTP_404, title='API.NOT_FOUND', description='API.TENANT_NOT_FOUND') |
|
| 241 | ||
| 242 | tenant = dict() |
|
| 243 | tenant['id'] = row_tenant[0] |
|
| 244 | tenant['name'] = row_tenant[1] |
|
| 245 | tenant['area'] = row_tenant[2] |
|
| 246 | tenant['cost_center_id'] = row_tenant[3] |
|
| 247 | ||
| 248 | ################################################################################################################ |
|
| 249 | # Step 3: query energy categories |
|
| 250 | ################################################################################################################ |
|
| 251 | energy_category_set = set() |
|
| 252 | # query energy categories in base period |
|
| 253 | cursor_energy.execute(" SELECT DISTINCT(energy_category_id) " |
|
| 254 | " FROM tbl_tenant_input_category_hourly " |
|
| 255 | " WHERE tenant_id = %s " |
|
| 256 | " AND start_datetime_utc >= %s " |
|
| 257 | " AND start_datetime_utc < %s ", |
|
| 258 | (tenant['id'], base_start_datetime_utc, base_end_datetime_utc)) |
|
| 259 | rows_energy_categories = cursor_energy.fetchall() |
|
| 260 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
| 261 | for row_energy_category in rows_energy_categories: |
|
| 262 | energy_category_set.add(row_energy_category[0]) |
|
| 263 | ||
| 264 | # query energy categories in reporting period |
|
| 265 | cursor_energy.execute(" SELECT DISTINCT(energy_category_id) " |
|
| 266 | " FROM tbl_tenant_input_category_hourly " |
|
| 267 | " WHERE tenant_id = %s " |
|
| 268 | " AND start_datetime_utc >= %s " |
|
| 269 | " AND start_datetime_utc < %s ", |
|
| 270 | (tenant['id'], reporting_start_datetime_utc, reporting_end_datetime_utc)) |
|
| 271 | rows_energy_categories = cursor_energy.fetchall() |
|
| 272 | if rows_energy_categories is not None and len(rows_energy_categories) > 0: |
|
| 273 | for row_energy_category in rows_energy_categories: |
|
| 274 | energy_category_set.add(row_energy_category[0]) |
|
| 275 | ||
| 276 | # query all energy categories in base period and reporting period |
|
| 277 | cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e " |
|
| 278 | " FROM tbl_energy_categories " |
|
| 279 | " ORDER BY id ", ) |
|
| 280 | rows_energy_categories = cursor_system.fetchall() |
|
| 281 | if rows_energy_categories is None or len(rows_energy_categories) == 0: |
|
| 282 | if cursor_system: |
|
| 283 | cursor_system.close() |
|
| 284 | if cnx_system: |
|
| 285 | cnx_system.close() |
|
| 286 | ||
| 287 | if cursor_energy: |
|
| 288 | cursor_energy.close() |
|
| 289 | if cnx_energy: |
|
| 290 | cnx_energy.close() |
|
| 291 | ||
| 292 | if cursor_historical: |
|
| 293 | cursor_historical.close() |
|
| 294 | if cnx_historical: |
|
| 295 | cnx_historical.close() |
|
| 296 | raise falcon.HTTPError(status=falcon.HTTP_404, |
|
| 297 | title='API.NOT_FOUND', |
|
| 298 | description='API.ENERGY_CATEGORY_NOT_FOUND') |
|
| 299 | energy_category_dict = dict() |
|
| 300 | for row_energy_category in rows_energy_categories: |
|
| 301 | if row_energy_category[0] in energy_category_set: |
|
| 302 | energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1], |
|
| 303 | "unit_of_measure": row_energy_category[2], |
|
| 304 | "kgce": row_energy_category[3], |
|
| 305 | "kgco2e": row_energy_category[4]} |
|
| 306 | ||
| 307 | ################################################################################################################ |
|
| 308 | # Step 4: query associated sensors |
|
| 309 | ################################################################################################################ |
|
| 310 | point_list = list() |
|
| 311 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
| 312 | " FROM tbl_tenants t, tbl_sensors s, tbl_tenants_sensors ts, " |
|
| 313 | " tbl_points p, tbl_sensors_points sp " |
|
| 314 | " WHERE t.id = %s AND t.id = ts.tenant_id AND ts.sensor_id = s.id " |
|
| 315 | " AND s.id = sp.sensor_id AND sp.point_id = p.id " |
|
| 316 | " ORDER BY p.id ", (tenant['id'],)) |
|
| 317 | rows_points = cursor_system.fetchall() |
|
| 318 | if rows_points is not None and len(rows_points) > 0: |
|
| 319 | for row in rows_points: |
|
| 320 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
| 321 | ||
| 322 | ################################################################################################################ |
|
| 323 | # Step 5: query associated points |
|
| 324 | ################################################################################################################ |
|
| 325 | cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type " |
|
| 326 | " FROM tbl_tenants t, tbl_tenants_points tp, tbl_points p " |
|
| 327 | " WHERE t.id = %s AND t.id = tp.tenant_id AND tp.point_id = p.id " |
|
| 328 | " ORDER BY p.id ", (tenant['id'],)) |
|
| 329 | rows_points = cursor_system.fetchall() |
|
| 330 | if rows_points is not None and len(rows_points) > 0: |
|
| 331 | for row in rows_points: |
|
| 332 | point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]}) |
|
| 333 | ||
| 334 | ################################################################################################################ |
|
| 335 | # Step 6: query base period energy input |
|
| 336 | ################################################################################################################ |
|
| 337 | base = dict() |
|
| 338 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 339 | for energy_category_id in energy_category_set: |
|
| 340 | base[energy_category_id] = dict() |
|
| 341 | base[energy_category_id]['timestamps'] = list() |
|
| 342 | base[energy_category_id]['values'] = list() |
|
| 343 | base[energy_category_id]['subtotal'] = Decimal(0.0) |
|
| 344 | base[energy_category_id]['mean'] = None |
|
| 345 | base[energy_category_id]['median'] = None |
|
| 346 | base[energy_category_id]['minimum'] = None |
|
| 347 | base[energy_category_id]['maximum'] = None |
|
| 348 | base[energy_category_id]['stdev'] = None |
|
| 349 | base[energy_category_id]['variance'] = None |
|
| 350 | ||
| 351 | cursor_energy.execute(" SELECT start_datetime_utc, actual_value " |
|
| 352 | " FROM tbl_tenant_input_category_hourly " |
|
| 353 | " WHERE tenant_id = %s " |
|
| 354 | " AND energy_category_id = %s " |
|
| 355 | " AND start_datetime_utc >= %s " |
|
| 356 | " AND start_datetime_utc < %s " |
|
| 357 | " ORDER BY start_datetime_utc ", |
|
| 358 | (tenant['id'], |
|
| 359 | energy_category_id, |
|
| 360 | base_start_datetime_utc, |
|
| 361 | base_end_datetime_utc)) |
|
| 362 | rows_tenant_hourly = cursor_energy.fetchall() |
|
| 363 | ||
| 364 | rows_tenant_periodically, \ |
|
| 365 | base[energy_category_id]['mean'], \ |
|
| 366 | base[energy_category_id]['median'], \ |
|
| 367 | base[energy_category_id]['minimum'], \ |
|
| 368 | base[energy_category_id]['maximum'], \ |
|
| 369 | base[energy_category_id]['stdev'], \ |
|
| 370 | base[energy_category_id]['variance'] = \ |
|
| 371 | utilities.statistics_hourly_data_by_period(rows_tenant_hourly, |
|
| 372 | base_start_datetime_utc, |
|
| 373 | base_end_datetime_utc, |
|
| 374 | period_type) |
|
| 375 | ||
| 376 | for row_tenant_periodically in rows_tenant_periodically: |
|
| 377 | current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
| 378 | timedelta(minutes=timezone_offset) |
|
| 379 | if period_type == 'hourly': |
|
| 380 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 381 | elif period_type == 'daily': |
|
| 382 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 383 | elif period_type == 'weekly': |
|
| 384 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 385 | elif period_type == 'monthly': |
|
| 386 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
| 387 | elif period_type == 'yearly': |
|
| 388 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
| 389 | ||
| 390 | actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1] |
|
| 391 | base[energy_category_id]['timestamps'].append(current_datetime) |
|
| 392 | base[energy_category_id]['values'].append(actual_value) |
|
| 393 | base[energy_category_id]['subtotal'] += actual_value |
|
| 394 | ||
| 395 | ################################################################################################################ |
|
| 396 | # Step 7: query reporting period energy input |
|
| 397 | ################################################################################################################ |
|
| 398 | reporting = dict() |
|
| 399 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 400 | for energy_category_id in energy_category_set: |
|
| 401 | reporting[energy_category_id] = dict() |
|
| 402 | reporting[energy_category_id]['timestamps'] = list() |
|
| 403 | reporting[energy_category_id]['values'] = list() |
|
| 404 | reporting[energy_category_id]['subtotal'] = Decimal(0.0) |
|
| 405 | reporting[energy_category_id]['mean'] = None |
|
| 406 | reporting[energy_category_id]['median'] = None |
|
| 407 | reporting[energy_category_id]['minimum'] = None |
|
| 408 | reporting[energy_category_id]['maximum'] = None |
|
| 409 | reporting[energy_category_id]['stdev'] = None |
|
| 410 | reporting[energy_category_id]['variance'] = None |
|
| 411 | ||
| 412 | cursor_energy.execute(" SELECT start_datetime_utc, actual_value " |
|
| 413 | " FROM tbl_tenant_input_category_hourly " |
|
| 414 | " WHERE tenant_id = %s " |
|
| 415 | " AND energy_category_id = %s " |
|
| 416 | " AND start_datetime_utc >= %s " |
|
| 417 | " AND start_datetime_utc < %s " |
|
| 418 | " ORDER BY start_datetime_utc ", |
|
| 419 | (tenant['id'], |
|
| 420 | energy_category_id, |
|
| 421 | reporting_start_datetime_utc, |
|
| 422 | reporting_end_datetime_utc)) |
|
| 423 | rows_tenant_hourly = cursor_energy.fetchall() |
|
| 424 | ||
| 425 | rows_tenant_periodically, \ |
|
| 426 | reporting[energy_category_id]['mean'], \ |
|
| 427 | reporting[energy_category_id]['median'], \ |
|
| 428 | reporting[energy_category_id]['minimum'], \ |
|
| 429 | reporting[energy_category_id]['maximum'], \ |
|
| 430 | reporting[energy_category_id]['stdev'], \ |
|
| 431 | reporting[energy_category_id]['variance'] = \ |
|
| 432 | utilities.statistics_hourly_data_by_period(rows_tenant_hourly, |
|
| 433 | reporting_start_datetime_utc, |
|
| 434 | reporting_end_datetime_utc, |
|
| 435 | period_type) |
|
| 436 | ||
| 437 | for row_tenant_periodically in rows_tenant_periodically: |
|
| 438 | current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \ |
|
| 439 | timedelta(minutes=timezone_offset) |
|
| 440 | if period_type == 'hourly': |
|
| 441 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 442 | elif period_type == 'daily': |
|
| 443 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 444 | elif period_type == 'weekly': |
|
| 445 | current_datetime = current_datetime_local.isoformat()[0:10] |
|
| 446 | elif period_type == 'monthly': |
|
| 447 | current_datetime = current_datetime_local.isoformat()[0:7] |
|
| 448 | elif period_type == 'yearly': |
|
| 449 | current_datetime = current_datetime_local.isoformat()[0:4] |
|
| 450 | ||
| 451 | actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1] |
|
| 452 | reporting[energy_category_id]['timestamps'].append(current_datetime) |
|
| 453 | reporting[energy_category_id]['values'].append(actual_value) |
|
| 454 | reporting[energy_category_id]['subtotal'] += actual_value |
|
| 455 | ||
| 456 | ################################################################################################################ |
|
| 457 | # Step 8: query tariff data |
|
| 458 | ################################################################################################################ |
|
| 459 | parameters_data = dict() |
|
| 460 | parameters_data['names'] = list() |
|
| 461 | parameters_data['timestamps'] = list() |
|
| 462 | parameters_data['values'] = list() |
|
| 463 | if config.is_tariff_appended and energy_category_set is not None and len(energy_category_set) > 0 \ |
|
| 464 | and not is_quick_mode: |
|
| 465 | for energy_category_id in energy_category_set: |
|
| 466 | energy_category_tariff_dict = utilities.get_energy_category_tariffs(tenant['cost_center_id'], |
|
| 467 | energy_category_id, |
|
| 468 | reporting_start_datetime_utc, |
|
| 469 | reporting_end_datetime_utc) |
|
| 470 | tariff_timestamp_list = list() |
|
| 471 | tariff_value_list = list() |
|
| 472 | for k, v in energy_category_tariff_dict.items(): |
|
| 473 | # convert k from utc to local |
|
| 474 | k = k + timedelta(minutes=timezone_offset) |
|
| 475 | tariff_timestamp_list.append(k.isoformat()[0:19]) |
|
| 476 | tariff_value_list.append(v) |
|
| 477 | ||
| 478 | parameters_data['names'].append(_('Tariff') + '-' + energy_category_dict[energy_category_id]['name']) |
|
| 479 | parameters_data['timestamps'].append(tariff_timestamp_list) |
|
| 480 | parameters_data['values'].append(tariff_value_list) |
|
| 481 | ||
| 482 | ################################################################################################################ |
|
| 483 | # Step 9: query associated sensors and points data |
|
| 484 | ################################################################################################################ |
|
| 485 | if not is_quick_mode: |
|
| 486 | for point in point_list: |
|
| 487 | point_values = [] |
|
| 488 | point_timestamps = [] |
|
| 489 | if point['object_type'] == 'ENERGY_VALUE': |
|
| 490 | query = (" SELECT utc_date_time, actual_value " |
|
| 491 | " FROM tbl_energy_value " |
|
| 492 | " WHERE point_id = %s " |
|
| 493 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 494 | " ORDER BY utc_date_time ") |
|
| 495 | cursor_historical.execute(query, (point['id'], |
|
| 496 | reporting_start_datetime_utc, |
|
| 497 | reporting_end_datetime_utc)) |
|
| 498 | rows = cursor_historical.fetchall() |
|
| 499 | ||
| 500 | if rows is not None and len(rows) > 0: |
|
| 501 | for row in rows: |
|
| 502 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 503 | timedelta(minutes=timezone_offset) |
|
| 504 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 505 | point_timestamps.append(current_datetime) |
|
| 506 | point_values.append(row[1]) |
|
| 507 | elif point['object_type'] == 'ANALOG_VALUE': |
|
| 508 | query = (" SELECT utc_date_time, actual_value " |
|
| 509 | " FROM tbl_analog_value " |
|
| 510 | " WHERE point_id = %s " |
|
| 511 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 512 | " ORDER BY utc_date_time ") |
|
| 513 | cursor_historical.execute(query, (point['id'], |
|
| 514 | reporting_start_datetime_utc, |
|
| 515 | reporting_end_datetime_utc)) |
|
| 516 | rows = cursor_historical.fetchall() |
|
| 517 | ||
| 518 | if rows is not None and len(rows) > 0: |
|
| 519 | for row in rows: |
|
| 520 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 521 | timedelta(minutes=timezone_offset) |
|
| 522 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 523 | point_timestamps.append(current_datetime) |
|
| 524 | point_values.append(row[1]) |
|
| 525 | elif point['object_type'] == 'DIGITAL_VALUE': |
|
| 526 | query = (" SELECT utc_date_time, actual_value " |
|
| 527 | " FROM tbl_digital_value " |
|
| 528 | " WHERE point_id = %s " |
|
| 529 | " AND utc_date_time BETWEEN %s AND %s " |
|
| 530 | " ORDER BY utc_date_time ") |
|
| 531 | cursor_historical.execute(query, (point['id'], |
|
| 532 | reporting_start_datetime_utc, |
|
| 533 | reporting_end_datetime_utc)) |
|
| 534 | rows = cursor_historical.fetchall() |
|
| 535 | ||
| 536 | if rows is not None and len(rows) > 0: |
|
| 537 | for row in rows: |
|
| 538 | current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \ |
|
| 539 | timedelta(minutes=timezone_offset) |
|
| 540 | current_datetime = current_datetime_local.isoformat()[0:19] |
|
| 541 | point_timestamps.append(current_datetime) |
|
| 542 | point_values.append(row[1]) |
|
| 543 | ||
| 544 | parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')') |
|
| 545 | parameters_data['timestamps'].append(point_timestamps) |
|
| 546 | parameters_data['values'].append(point_values) |
|
| 547 | ||
| 548 | ################################################################################################################ |
|
| 549 | # Step 10: construct the report |
|
| 550 | ################################################################################################################ |
|
| 551 | if cursor_system: |
|
| 552 | cursor_system.close() |
|
| 553 | if cnx_system: |
|
| 554 | cnx_system.close() |
|
| 555 | ||
| 556 | if cursor_energy: |
|
| 557 | cursor_energy.close() |
|
| 558 | if cnx_energy: |
|
| 559 | cnx_energy.close() |
|
| 560 | ||
| 561 | if cursor_historical: |
|
| 562 | cursor_historical.close() |
|
| 563 | if cnx_historical: |
|
| 564 | cnx_historical.close() |
|
| 565 | ||
| 566 | result = dict() |
|
| 567 | ||
| 568 | result['tenant'] = dict() |
|
| 569 | result['tenant']['name'] = tenant['name'] |
|
| 570 | result['tenant']['area'] = tenant['area'] |
|
| 571 | ||
| 572 | result['base_period'] = dict() |
|
| 573 | result['base_period']['names'] = list() |
|
| 574 | result['base_period']['units'] = list() |
|
| 575 | result['base_period']['timestamps'] = list() |
|
| 576 | result['base_period']['values'] = list() |
|
| 577 | result['base_period']['subtotals'] = list() |
|
| 578 | result['base_period']['means'] = list() |
|
| 579 | result['base_period']['medians'] = list() |
|
| 580 | result['base_period']['minimums'] = list() |
|
| 581 | result['base_period']['maximums'] = list() |
|
| 582 | result['base_period']['stdevs'] = list() |
|
| 583 | result['base_period']['variances'] = list() |
|
| 584 | ||
| 585 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 586 | for energy_category_id in energy_category_set: |
|
| 587 | result['base_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
| 588 | result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure']) |
|
| 589 | result['base_period']['timestamps'].append(base[energy_category_id]['timestamps']) |
|
| 590 | result['base_period']['values'].append(base[energy_category_id]['values']) |
|
| 591 | result['base_period']['subtotals'].append(base[energy_category_id]['subtotal']) |
|
| 592 | result['base_period']['means'].append(base[energy_category_id]['mean']) |
|
| 593 | result['base_period']['medians'].append(base[energy_category_id]['median']) |
|
| 594 | result['base_period']['minimums'].append(base[energy_category_id]['minimum']) |
|
| 595 | result['base_period']['maximums'].append(base[energy_category_id]['maximum']) |
|
| 596 | result['base_period']['stdevs'].append(base[energy_category_id]['stdev']) |
|
| 597 | result['base_period']['variances'].append(base[energy_category_id]['variance']) |
|
| 598 | ||
| 599 | result['reporting_period'] = dict() |
|
| 600 | result['reporting_period']['names'] = list() |
|
| 601 | result['reporting_period']['energy_category_ids'] = list() |
|
| 602 | result['reporting_period']['units'] = list() |
|
| 603 | result['reporting_period']['timestamps'] = list() |
|
| 604 | result['reporting_period']['values'] = list() |
|
| 605 | result['reporting_period']['rates'] = list() |
|
| 606 | result['reporting_period']['subtotals'] = list() |
|
| 607 | result['reporting_period']['means'] = list() |
|
| 608 | result['reporting_period']['means_per_unit_area'] = list() |
|
| 609 | result['reporting_period']['means_increment_rate'] = list() |
|
| 610 | result['reporting_period']['medians'] = list() |
|
| 611 | result['reporting_period']['medians_per_unit_area'] = list() |
|
| 612 | result['reporting_period']['medians_increment_rate'] = list() |
|
| 613 | result['reporting_period']['minimums'] = list() |
|
| 614 | result['reporting_period']['minimums_per_unit_area'] = list() |
|
| 615 | result['reporting_period']['minimums_increment_rate'] = list() |
|
| 616 | result['reporting_period']['maximums'] = list() |
|
| 617 | result['reporting_period']['maximums_per_unit_area'] = list() |
|
| 618 | result['reporting_period']['maximums_increment_rate'] = list() |
|
| 619 | result['reporting_period']['stdevs'] = list() |
|
| 620 | result['reporting_period']['stdevs_per_unit_area'] = list() |
|
| 621 | result['reporting_period']['stdevs_increment_rate'] = list() |
|
| 622 | result['reporting_period']['variances'] = list() |
|
| 623 | result['reporting_period']['variances_per_unit_area'] = list() |
|
| 624 | result['reporting_period']['variances_increment_rate'] = list() |
|
| 625 | ||
| 626 | if energy_category_set is not None and len(energy_category_set) > 0: |
|
| 627 | for energy_category_id in energy_category_set: |
|
| 628 | result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name']) |
|
| 629 | result['reporting_period']['energy_category_ids'].append(energy_category_id) |
|
| 630 | result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure']) |
|
| 631 | result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps']) |
|
| 632 | result['reporting_period']['values'].append(reporting[energy_category_id]['values']) |
|
| 633 | result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal']) |
|
| 634 | result['reporting_period']['means'].append(reporting[energy_category_id]['mean']) |
|
| 635 | result['reporting_period']['means_per_unit_area'].append( |
|
| 636 | reporting[energy_category_id]['mean'] / tenant['area'] |
|
| 637 | if reporting[energy_category_id]['mean'] is not None and |
|
| 638 | tenant['area'] is not None and |
|
| 639 | tenant['area'] > Decimal(0.0) |
|
| 640 | else None) |
|
| 641 | result['reporting_period']['means_increment_rate'].append( |
|
| 642 | (reporting[energy_category_id]['mean'] - base[energy_category_id]['mean']) / |
|
| 643 | base[energy_category_id]['mean'] if (base[energy_category_id]['mean'] is not None and |
|
| 644 | base[energy_category_id]['mean'] > Decimal(0.0)) |
|
| 645 | else None) |
|
| 646 | result['reporting_period']['medians'].append(reporting[energy_category_id]['median']) |
|
| 647 | result['reporting_period']['medians_per_unit_area'].append( |
|
| 648 | reporting[energy_category_id]['median'] / tenant['area'] |
|
| 649 | if reporting[energy_category_id]['median'] is not None and |
|
| 650 | tenant['area'] is not None and |
|
| 651 | tenant['area'] > Decimal(0.0) |
|
| 652 | else None) |
|
| 653 | result['reporting_period']['medians_increment_rate'].append( |
|
| 654 | (reporting[energy_category_id]['median'] - base[energy_category_id]['median']) / |
|
| 655 | base[energy_category_id]['median'] if (base[energy_category_id]['median'] is not None and |
|
| 656 | base[energy_category_id]['median'] > Decimal(0.0)) |
|
| 657 | else None) |
|
| 658 | result['reporting_period']['minimums'].append(reporting[energy_category_id]['minimum']) |
|
| 659 | result['reporting_period']['minimums_per_unit_area'].append( |
|
| 660 | reporting[energy_category_id]['minimum'] / tenant['area'] |
|
| 661 | if reporting[energy_category_id]['minimum'] is not None and |
|
| 662 | tenant['area'] is not None and |
|
| 663 | tenant['area'] > Decimal(0.0) |
|
| 664 | else None) |
|
| 665 | result['reporting_period']['minimums_increment_rate'].append( |
|
| 666 | (reporting[energy_category_id]['minimum'] - base[energy_category_id]['minimum']) / |
|
| 667 | base[energy_category_id]['minimum'] if (base[energy_category_id]['minimum'] is not None and |
|
| 668 | base[energy_category_id]['minimum'] > Decimal(0.0)) |
|
| 669 | else None) |
|
| 670 | result['reporting_period']['maximums'].append(reporting[energy_category_id]['maximum']) |
|
| 671 | result['reporting_period']['maximums_per_unit_area'].append( |
|
| 672 | reporting[energy_category_id]['maximum'] / tenant['area'] |
|
| 673 | if reporting[energy_category_id]['maximum'] is not None and |
|
| 674 | tenant['area'] is not None and |
|
| 675 | tenant['area'] > Decimal(0.0) |
|
| 676 | else None) |
|
| 677 | result['reporting_period']['maximums_increment_rate'].append( |
|
| 678 | (reporting[energy_category_id]['maximum'] - base[energy_category_id]['maximum']) / |
|
| 679 | base[energy_category_id]['maximum'] if (base[energy_category_id]['maximum'] is not None and |
|
| 680 | base[energy_category_id]['maximum'] > Decimal(0.0)) |
|
| 681 | else None) |
|
| 682 | result['reporting_period']['stdevs'].append(reporting[energy_category_id]['stdev']) |
|
| 683 | result['reporting_period']['stdevs_per_unit_area'].append( |
|
| 684 | reporting[energy_category_id]['stdev'] / tenant['area'] |
|
| 685 | if reporting[energy_category_id]['stdev'] is not None and |
|
| 686 | tenant['area'] is not None and |
|
| 687 | tenant['area'] > Decimal(0.0) |
|
| 688 | else None) |
|
| 689 | result['reporting_period']['stdevs_increment_rate'].append( |
|
| 690 | (reporting[energy_category_id]['stdev'] - base[energy_category_id]['stdev']) / |
|
| 691 | base[energy_category_id]['stdev'] if (base[energy_category_id]['stdev'] is not None and |
|
| 692 | base[energy_category_id]['stdev'] > Decimal(0.0)) |
|
| 693 | else None) |
|
| 694 | result['reporting_period']['variances'].append(reporting[energy_category_id]['variance']) |
|
| 695 | result['reporting_period']['variances_per_unit_area'].append( |
|
| 696 | reporting[energy_category_id]['variance'] / tenant['area'] |
|
| 697 | if reporting[energy_category_id]['variance'] is not None and |
|
| 698 | tenant['area'] is not None and |
|
| 699 | tenant['area'] > Decimal(0.0) |
|
| 700 | else None) |
|
| 701 | result['reporting_period']['variances_increment_rate'].append( |
|
| 702 | (reporting[energy_category_id]['variance'] - base[energy_category_id]['variance']) / |
|
| 703 | base[energy_category_id]['variance'] if (base[energy_category_id]['variance'] is not None and |
|
| 704 | base[energy_category_id]['variance'] > Decimal(0.0)) |
|
| 705 | else None) |
|
| 706 | ||
| 707 | rate = list() |
|
| 708 | for index, value in enumerate(reporting[energy_category_id]['values']): |
|
| 709 | if index < len(base[energy_category_id]['values']) \ |
|
| 710 | and base[energy_category_id]['values'][index] != 0 and value != 0: |
|
| 711 | rate.append((value - base[energy_category_id]['values'][index]) |
|
| 712 | / base[energy_category_id]['values'][index]) |
|
| 713 | else: |
|
| 714 | rate.append(None) |
|
| 715 | result['reporting_period']['rates'].append(rate) |
|
| 716 | ||
| 717 | result['parameters'] = { |
|
| 718 | "names": parameters_data['names'], |
|
| 719 | "timestamps": parameters_data['timestamps'], |
|
| 720 | "values": parameters_data['values'] |
|
| 721 | } |
|
| 722 | # export result to Excel file and then encode the file to base64 string |
|
| 723 | if not is_quick_mode: |
|
| 724 | result['excel_bytes_base64'] = excelexporters.tenantstatistics.export(result, |
|
| 725 | tenant['name'], |
|
| 726 | base_period_start_datetime_local, |
|
| 727 | base_period_end_datetime_local, |
|
| 728 | reporting_period_start_datetime_local, |
|
| 729 | reporting_period_end_datetime_local, |
|
| 730 | period_type, |
|
| 731 | language) |
|
| 732 | ||
| 733 | resp.text = json.dumps(result) |
|
| 734 | ||
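Apart from the identifiers, the two copies above differ only in the hourly table and key column they query (`tbl_store_input_category_hourly` with `store_id` versus `tbl_tenant_input_category_hourly` with `tenant_id`) and in the Excel exporter called at the end. A hedged sketch of how the duplicated Steps 6 and 7 could be driven by one parameterized helper; `fetch_periodic_energy` and its signature are hypothetical, and the `from core import utilities` import is assumed to match the one the report modules already use for `utilities.statistics_hourly_data_by_period`:

```python
from core import utilities  # assumed import; the reports call utilities.statistics_hourly_data_by_period


def fetch_periodic_energy(cursor_energy, table_name, id_column, entity_id,
                          energy_category_id, start_datetime_utc, end_datetime_utc,
                          period_type):
    """Query one entity/category's hourly energy input and aggregate it by period.

    table_name and id_column are internal constants (e.g. 'tbl_tenant_input_category_hourly'
    with 'tenant_id'), never user input, so interpolating them into the SQL text does not
    open an injection path; all runtime values still go through %s placeholders.
    """
    cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                          " FROM " + table_name +
                          " WHERE " + id_column + " = %s "
                          " AND energy_category_id = %s "
                          " AND start_datetime_utc >= %s "
                          " AND start_datetime_utc < %s "
                          " ORDER BY start_datetime_utc ",
                          (entity_id, energy_category_id,
                           start_datetime_utc, end_datetime_utc))
    rows_hourly = cursor_energy.fetchall()
    # Same aggregation as Steps 6 and 7: the periodic rows plus mean, median, minimum,
    # maximum, stdev and variance for the requested period type.
    return utilities.statistics_hourly_data_by_period(rows_hourly,
                                                      start_datetime_utc,
                                                      end_datetime_utc,
                                                      period_type)
```

Each base-period or reporting-period loop body in Steps 6 and 7 would then be a single call, with only the table name, key column, entity id and datetime range varying between the store and tenant variants.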