| Metric | Value |
| --- | --- |
| Conditions | 87 |
| Total Lines | 529 |
| Code Lines | 373 |
| Lines | 529 |
| Ratio | 100 % |
| Changes | 0 |
Small methods make your code easier to understand, especially when combined with a good name. And if a method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments inside a method's body, that is usually a sign that the commented part should be extracted into a new method, with the comment serving as a starting point for the new method's name.
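The listing below contains exactly this pattern: the block introduced by the comment "# normalize the start datetime" could become a method named after that comment. A minimal sketch, assuming a standalone helper is acceptable (the name `normalize_start_datetime` and the explicit `minutes_to_count` parameter are illustrative, not part of the project):

```python
from datetime import datetime


def normalize_start_datetime(start_datetime_utc: datetime, minutes_to_count: int) -> datetime:
    """Round the reporting start down to the half hour or full hour."""
    # The comment becomes the method name, so the call site no longer needs the comment.
    if minutes_to_count == 30 and start_datetime_utc.minute >= 30:
        return start_datetime_utc.replace(minute=30, second=0, microsecond=0)
    return start_datetime_utc.replace(minute=0, second=0, microsecond=0)
```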
Commonly applied refactorings include:

- Extract Method

If many parameters/temporary variables are present:

- Introduce Parameter Object (a sketch follows this list)
- Replace Method with Method Object
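A hedged sketch of Introduce Parameter Object applied to the flagged handler below; the `ComparisonQuery` class and its `from_params` constructor are assumptions for illustration, not existing project code:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class ComparisonQuery:
    """The query-string values that the flagged method below keeps in ten local variables."""

    shopfloor_id1: Optional[str]
    shopfloor_uuid1: Optional[str]
    shopfloor_id2: Optional[str]
    shopfloor_uuid2: Optional[str]
    energy_category_id: Optional[str]
    period_type: Optional[str]
    reporting_period_start: Optional[str]
    reporting_period_end: Optional[str]
    language: Optional[str]
    quick_mode: Optional[str]

    @classmethod
    def from_params(cls, params: dict) -> "ComparisonQuery":
        # A single place that knows the query-string names; validation helpers
        # can then accept one ComparisonQuery instead of many separate arguments.
        return cls(
            shopfloor_id1=params.get("shopfloorid1"),
            shopfloor_uuid1=params.get("shopflooruuid1"),
            shopfloor_id2=params.get("shopfloorid2"),
            shopfloor_uuid2=params.get("shopflooruuid2"),
            energy_category_id=params.get("energycategoryid"),
            period_type=params.get("periodtype"),
            reporting_period_start=params.get("reportingperiodstartdatetime"),
            reporting_period_end=params.get("reportingperiodenddatetime"),
            language=params.get("language"),
            quick_mode=params.get("quickmode"),
        )
```

With such an object, the parameter validation in Step 1 of the listing could move into helpers that take a single argument, e.g. `query = ComparisonQuery.from_params(req.params)`.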
Complex methods like reports.shopfloorcomparison.Reporting.on_get() often do a lot of different things. To break such a method down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for fields/variables that share the same prefixes or suffixes.
Once you have determined the pieces that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often quicker to apply.
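In on_get(), the shopfloor1_energy_data and shopfloor2_energy_data dictionaries and the two loops that fill them share exactly this kind of prefix/suffix signal. A hedged sketch of what an extracted component could look like (the `EnergySeries` class is hypothetical, not part of the project); the full flagged method is reproduced after the sketch:

```python
from decimal import Decimal


class EnergySeries:
    """Per-period timestamps, values and running total that on_get()
    currently rebuilds twice as bare dictionaries."""

    def __init__(self):
        self.timestamps = []
        self.values = []
        self.total_in_category = Decimal(0.0)

    def append(self, timestamp, value):
        # Mirrors the body of the two aggregation loops in on_get().
        self.timestamps.append(timestamp)
        self.values.append(value)
        if value is not None:
            self.total_in_category += value
```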
```python
import re
# ... (the remaining imports and the class declaration are not shown in this listing)

    @staticmethod
    def on_get(req, resp):
        if (
            "API-KEY" not in req.headers
            or not isinstance(req.headers["API-KEY"], str)
            or len(str.strip(req.headers["API-KEY"])) == 0
        ):
            access_control(req)
        else:
            api_key_control(req)
        print(req.params)
        # this procedure accepts shopfloor id or shopfloor uuid to identify a shopfloor
        shopfloor_id1 = req.params.get("shopfloorid1")
        shopfloor_uuid1 = req.params.get("shopflooruuid1")
        shopfloor_id2 = req.params.get("shopfloorid2")
        shopfloor_uuid2 = req.params.get("shopflooruuid2")
        energy_category_id = req.params.get("energycategoryid")
        period_type = req.params.get("periodtype")
        reporting_period_start_datetime_local = req.params.get(
            "reportingperiodstartdatetime"
        )
        reporting_period_end_datetime_local = req.params.get(
            "reportingperiodenddatetime"
        )
        language = req.params.get("language")
        quick_mode = req.params.get("quickmode")

        ################################################################################################################
        # Step 1: validate parameters
        ################################################################################################################
        if shopfloor_id1 is None and shopfloor_uuid1 is None:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_SHOPFLOOR_ID",
            )

        if shopfloor_id1 is not None:
            shopfloor_id1 = str.strip(shopfloor_id1)
            if not shopfloor_id1.isdigit() or int(shopfloor_id1) <= 0:
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_SHOPFLOOR_ID",
                )

        if shopfloor_uuid1 is not None:
            regex = re.compile(
                r"^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z",
                re.I,
            )
            match = regex.match(str.strip(shopfloor_uuid1))
            if not bool(match):
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_SHOPFLOOR_UUID",
                )

        if shopfloor_id2 is None and shopfloor_uuid2 is None:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_SHOPFLOOR_ID",
            )

        if shopfloor_id2 is not None:
            shopfloor_id2 = str.strip(shopfloor_id2)
            if not shopfloor_id2.isdigit() or int(shopfloor_id2) <= 0:
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_SHOPFLOOR_ID",
                )

        if shopfloor_uuid2 is not None:
            regex = re.compile(
                r"^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z",
                re.I,
            )
            match = regex.match(str.strip(shopfloor_uuid2))
            if not bool(match):
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_SHOPFLOOR_UUID",
                )

        if energy_category_id is None:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_ENERGY_CATEGORY_ID",
            )
        else:
            energy_category_id = str.strip(energy_category_id)
            if not energy_category_id.isdigit() or int(energy_category_id) <= 0:
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_ENERGY_CATEGORY_ID",
                )

        if period_type is None:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_PERIOD_TYPE",
            )
        else:
            period_type = str.strip(period_type)
            if period_type not in ["hourly", "daily", "weekly", "monthly", "yearly"]:
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_PERIOD_TYPE",
                )

        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == "-":
            timezone_offset = -timezone_offset

        if reporting_period_start_datetime_local is None:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_REPORTING_PERIOD_START_DATETIME",
            )
        else:
            reporting_period_start_datetime_local = str.strip(
                reporting_period_start_datetime_local
            )
            try:
                reporting_start_datetime_utc = datetime.strptime(
                    reporting_period_start_datetime_local, "%Y-%m-%dT%H:%M:%S"
                )
            except ValueError:
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_REPORTING_PERIOD_START_DATETIME",
                )
            reporting_start_datetime_utc = reporting_start_datetime_utc.replace(
                tzinfo=timezone.utc
            ) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
            if (
                config.minutes_to_count == 30
                and reporting_start_datetime_utc.minute >= 30
            ):
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(
                    minute=30, second=0, microsecond=0
                )
            else:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(
                    minute=0, second=0, microsecond=0
                )

        if reporting_period_end_datetime_local is None:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_REPORTING_PERIOD_END_DATETIME",
            )
        else:
            reporting_period_end_datetime_local = str.strip(
                reporting_period_end_datetime_local
            )
            try:
                reporting_end_datetime_utc = datetime.strptime(
                    reporting_period_end_datetime_local, "%Y-%m-%dT%H:%M:%S"
                )
            except ValueError:
                raise falcon.HTTPError(
                    status=falcon.HTTP_400,
                    title="API.BAD_REQUEST",
                    description="API.INVALID_REPORTING_PERIOD_END_DATETIME",
                )
            reporting_end_datetime_utc = reporting_end_datetime_utc.replace(
                tzinfo=timezone.utc
            ) - timedelta(minutes=timezone_offset)

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(
                status=falcon.HTTP_400,
                title="API.BAD_REQUEST",
                description="API.INVALID_REPORTING_PERIOD_END_DATETIME",
            )

        # if quick mode is turned on, do not return parameters data and excel file
        is_quick_mode = False
        if (
            quick_mode is not None
            and len(str.strip(quick_mode)) > 0
            and str.lower(str.strip(quick_mode)) in ("true", "t", "on", "yes", "y")
        ):
            is_quick_mode = True

        ################################################################################################################
        # Step 2: query the shopfloor and energy category
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_energy = mysql.connector.connect(**config.myems_energy_db)
        cursor_energy = cnx_energy.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        # Query shopfloor 1
        if shopfloor_id1 is not None:
            cursor_system.execute(
                " SELECT id, name FROM tbl_shopfloors WHERE id = %s ", (shopfloor_id1,)
            )
            row_shopfloor1 = cursor_system.fetchone()
        elif shopfloor_uuid1 is not None:
            cursor_system.execute(
                " SELECT id, name FROM tbl_shopfloors WHERE uuid = %s ",
                (shopfloor_uuid1,),
            )
            row_shopfloor1 = cursor_system.fetchone()

        if row_shopfloor1 is None:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.close()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.close()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.close()
            raise falcon.HTTPError(
                status=falcon.HTTP_404,
                title="API.NOT_FOUND",
                description="API.SHOPFLOOR_NOT_FOUND",
            )

        shopfloor1 = dict()
        shopfloor1["id"] = row_shopfloor1[0]
        shopfloor1["name"] = row_shopfloor1[1]

        # Query shopfloor 2
        if shopfloor_id2 is not None:
            cursor_system.execute(
                " SELECT id, name FROM tbl_shopfloors WHERE id = %s ", (shopfloor_id2,)
            )
            row_shopfloor2 = cursor_system.fetchone()
        elif shopfloor_uuid2 is not None:
            cursor_system.execute(
                " SELECT id, name FROM tbl_shopfloors WHERE uuid = %s ",
                (shopfloor_uuid2,),
            )
            row_shopfloor2 = cursor_system.fetchone()

        if row_shopfloor2 is None:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.close()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.close()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.close()
            raise falcon.HTTPError(
                status=falcon.HTTP_404,
                title="API.NOT_FOUND",
                description="API.SHOPFLOOR_NOT_FOUND",
            )

        shopfloor2 = dict()
        shopfloor2["id"] = row_shopfloor2[0]
        shopfloor2["name"] = row_shopfloor2[1]

        # Query energy category
        cursor_system.execute(
            " SELECT id, name, unit_of_measure FROM tbl_energy_categories WHERE id = %s ",
            (energy_category_id,),
        )
        row_energy_category = cursor_system.fetchone()

        if row_energy_category is None:
            if cursor_system:
                cursor_system.close()
            if cnx_system:
                cnx_system.close()

            if cursor_energy:
                cursor_energy.close()
            if cnx_energy:
                cnx_energy.close()

            if cursor_historical:
                cursor_historical.close()
            if cnx_historical:
                cnx_historical.close()
            raise falcon.HTTPError(
                status=falcon.HTTP_404,
                title="API.NOT_FOUND",
                description="API.ENERGY_CATEGORY_NOT_FOUND",
            )

        energy_category = dict()
        energy_category["id"] = row_energy_category[0]
        energy_category["name"] = row_energy_category[1]
        energy_category["unit_of_measure"] = row_energy_category[2]

        ################################################################################################################
        # Step 3: query shopfloor input category hourly data (pre-aggregated by background service)
        ################################################################################################################
        # Query shopfloor 1 input category hourly data
        cursor_energy.execute(
            " SELECT start_datetime_utc, actual_value "
            " FROM tbl_shopfloor_input_category_hourly "
            " WHERE shopfloor_id = %s "
            " AND energy_category_id = %s "
            " AND start_datetime_utc >= %s "
            " AND start_datetime_utc < %s "
            " ORDER BY start_datetime_utc ",
            (
                shopfloor1["id"],
                energy_category_id,
                reporting_start_datetime_utc,
                reporting_end_datetime_utc,
            ),
        )
        rows_shopfloor1_hourly = cursor_energy.fetchall()

        # Query shopfloor 2 input category hourly data
        cursor_energy.execute(
            " SELECT start_datetime_utc, actual_value "
            " FROM tbl_shopfloor_input_category_hourly "
            " WHERE shopfloor_id = %s "
            " AND energy_category_id = %s "
            " AND start_datetime_utc >= %s "
            " AND start_datetime_utc < %s "
            " ORDER BY start_datetime_utc ",
            (
                shopfloor2["id"],
                energy_category_id,
                reporting_start_datetime_utc,
                reporting_end_datetime_utc,
            ),
        )
        rows_shopfloor2_hourly = cursor_energy.fetchall()

        ################################################################################################################
        # Step 4: aggregate shopfloor energy consumption data by period
        ################################################################################################################
        # Aggregate energy consumption for shopfloor 1
        shopfloor1_energy_data = dict()
        shopfloor1_energy_data["timestamps"] = list()
        shopfloor1_energy_data["values"] = list()
        shopfloor1_energy_data["total_in_category"] = Decimal(0.0)

        # Aggregate shopfloor 1 hourly data by period
        rows_shopfloor1_periodically = utilities.aggregate_hourly_data_by_period(
            rows_shopfloor1_hourly,
            reporting_start_datetime_utc,
            reporting_end_datetime_utc,
            period_type,
        )

        for row_shopfloor1_periodically in rows_shopfloor1_periodically:
            current_datetime_local = row_shopfloor1_periodically[0].replace(
                tzinfo=timezone.utc
            ) + timedelta(minutes=timezone_offset)
            if period_type == "hourly":
                current_datetime = current_datetime_local.isoformat()[0:19]
            elif period_type == "daily":
                current_datetime = current_datetime_local.isoformat()[0:10]
            elif period_type == "weekly":
                current_datetime = current_datetime_local.isoformat()[0:10]
            elif period_type == "monthly":
                current_datetime = current_datetime_local.isoformat()[0:7]
            elif period_type == "yearly":
                current_datetime = current_datetime_local.isoformat()[0:4]

            actual_value = row_shopfloor1_periodically[1]

            shopfloor1_energy_data["timestamps"].append(current_datetime)
            shopfloor1_energy_data["values"].append(actual_value)
            if actual_value is not None:
                shopfloor1_energy_data["total_in_category"] += actual_value

        # Aggregate energy consumption for shopfloor 2
        shopfloor2_energy_data = dict()
        shopfloor2_energy_data["timestamps"] = list()
        shopfloor2_energy_data["values"] = list()
        shopfloor2_energy_data["total_in_category"] = Decimal(0.0)

        # Aggregate shopfloor 2 hourly data by period
        rows_shopfloor2_periodically = utilities.aggregate_hourly_data_by_period(
            rows_shopfloor2_hourly,
            reporting_start_datetime_utc,
            reporting_end_datetime_utc,
            period_type,
        )

        for row_shopfloor2_periodically in rows_shopfloor2_periodically:
            current_datetime_local = row_shopfloor2_periodically[0].replace(
                tzinfo=timezone.utc
            ) + timedelta(minutes=timezone_offset)
            if period_type == "hourly":
                current_datetime = current_datetime_local.isoformat()[0:19]
            elif period_type == "daily":
                current_datetime = current_datetime_local.isoformat()[0:10]
            elif period_type == "weekly":
                current_datetime = current_datetime_local.isoformat()[0:10]
            elif period_type == "monthly":
                current_datetime = current_datetime_local.isoformat()[0:7]
            elif period_type == "yearly":
                current_datetime = current_datetime_local.isoformat()[0:4]

            actual_value = row_shopfloor2_periodically[1]

            shopfloor2_energy_data["timestamps"].append(current_datetime)
            shopfloor2_energy_data["values"].append(actual_value)
            if actual_value is not None:
                shopfloor2_energy_data["total_in_category"] += actual_value

        # Calculate difference
        diff = dict()
        diff["values"] = list()
        diff["total_in_category"] = Decimal(0.0)

        # Ensure both shopfloors have the same number of data points
        min_length = min(
            len(shopfloor1_energy_data["values"]), len(shopfloor2_energy_data["values"])
        )
        for i in range(min_length):
            shopfloor1_value = (
                shopfloor1_energy_data["values"][i]
                if i < len(shopfloor1_energy_data["values"])
                else None
            )
            shopfloor2_value = (
                shopfloor2_energy_data["values"][i]
                if i < len(shopfloor2_energy_data["values"])
                else None
            )

            # Calculate difference, handling None values
            if shopfloor1_value is None and shopfloor2_value is None:
                diff_value = None
            elif shopfloor1_value is None:
                diff_value = None  # Cannot calculate difference when one value is missing
            elif shopfloor2_value is None:
                diff_value = None  # Cannot calculate difference when one value is missing
            else:
                diff_value = shopfloor1_value - shopfloor2_value
                diff["total_in_category"] += diff_value

            diff["values"].append(diff_value)

        ################################################################################################################
        # Step 5: construct the report
        ################################################################################################################
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()

        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.close()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()

        result = {
            "shopfloor1": {
                "id": shopfloor1["id"],
                "name": shopfloor1["name"],
            },
            "shopfloor2": {
                "id": shopfloor2["id"],
                "name": shopfloor2["name"],
            },
            "energy_category": {
                "id": energy_category["id"],
                "name": energy_category["name"],
                "unit_of_measure": energy_category["unit_of_measure"],
            },
            "reporting_period1": {
                "total_in_category": shopfloor1_energy_data["total_in_category"],
                "timestamps": shopfloor1_energy_data["timestamps"],
                "values": shopfloor1_energy_data["values"],
            },
            "reporting_period2": {
                "total_in_category": shopfloor2_energy_data["total_in_category"],
                "timestamps": shopfloor2_energy_data["timestamps"],
                "values": shopfloor2_energy_data["values"],
            },
            "diff": {
                "values": diff["values"],
                "total_in_category": diff["total_in_category"],
            },
        }

        # export result to Excel file and then encode the file to base64 string
        if not is_quick_mode:
            result["excel_bytes_base64"] = excelexporters.shopfloorcomparison.export(
                result,
                shopfloor1["name"],
                shopfloor2["name"],
                energy_category["name"],
                reporting_period_start_datetime_local,
                reporting_period_end_datetime_local,
                period_type,
                language,
            )

        resp.text = json.dumps(result)
```
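The listing repeats the cursor/connection cleanup block four times (three error paths plus the normal path), which makes it a direct candidate for Extract Method. A minimal sketch, under the assumption that a module-level helper is acceptable (the name `close_all` is hypothetical, not part of the project):

```python
def close_all(*resources) -> None:
    """Close every cursor or connection that was actually opened."""
    for resource in resources:
        if resource:
            resource.close()
```

Each of the four copies in on_get() would then collapse to a single call such as `close_all(cursor_system, cnx_system, cursor_energy, cnx_energy, cursor_historical, cnx_historical)`; alternatively, `contextlib.closing` or `with` blocks would remove the need for manual cleanup entirely.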