import re
from datetime import datetime, timedelta, timezone
import falcon
import mysql.connector
import simplejson as json
import config
import excelexporters.energystoragepowerstationreportingparameters
from core import utilities
from core.useractivity import access_control, api_key_control


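# Reports raw parameter (point) values of an energy storage power station over a
# reporting period; optionally exports the result to an Excel file encoded as base64.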
class Reporting:
    def __init__(self):
        """Initializes Class"""
        pass

    @staticmethod
    def on_options(req, resp):
        _ = req
        resp.status = falcon.HTTP_200

    ####################################################################################################################
    # PROCEDURES
    # Step 1: validate parameters
    # Step 2: query associated points
    # Step 3: query associated points data
    # Step 4: construct the report
    ####################################################################################################################
    @staticmethod
    def on_get(req, resp):
        if 'API-KEY' not in req.headers or \
                not isinstance(req.headers['API-KEY'], str) or \
                len(str.strip(req.headers['API-KEY'])) == 0:
            access_control(req)
        else:
            api_key_control(req)
        print(req.params)
        # this procedure accepts a comma-separated list of point ids to identify
        # the points associated with an energy storage power station
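        # illustrative query string (example values only):
        #   pointids=1,2,3&reportingperiodstartdatetime=2024-01-01T00:00:00
        #   &reportingperiodenddatetime=2024-01-08T00:00:00&language=en&quickmode=false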
        point_id_list = req.params.get('pointids').split(",")
        reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime')
        language = req.params.get('language')
        quick_mode = req.params.get('quickmode')

        ################################################################################################################
        # Step 1: validate parameters
        ################################################################################################################

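        # config.utc_offset is expected to look like '+08:00' or '-05:30';
        # convert it to a signed offset in minutes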
        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset

        if reporting_period_start_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        else:
            reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local)
            try:
                reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local,
                                                                 '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
            reporting_start_datetime_utc = \
                reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
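            # align the start to a 30-minute boundary when config.minutes_to_count is 30,
            # otherwise align it to the top of the hour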
            if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0)
            else:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0)

        if reporting_period_end_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        else:
            reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local)
            try:
                reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local,
                                                               '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
            reporting_end_datetime_utc = reporting_end_datetime_utc.replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        # if quick mode is turned on, do not export and return the excel file
        is_quick_mode = False
        if quick_mode is not None and \
                len(str.strip(quick_mode)) > 0 and \
                str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'):
            is_quick_mode = True

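        # install gettext translations for the requested language;
        # _ is bound to the translation function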
        trans = utilities.get_translation(language)
        trans.install()
        _ = trans.gettext

        ################################################################################################################
        # Step 2: query associated points
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        point_list = list()
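        # look up name, units and object_type for every requested point;
        # note: the point ids are joined directly into the IN (...) clause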
        cursor_system.execute(" SELECT id, name, units, object_type "
                              " FROM tbl_points "
                              " WHERE id IN ( " + ', '.join(map(str, point_id_list)) + ") ", ())
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row_point in rows_points:
                point_list.append({"id": row_point[0],
                                   "name": row_point[1],
                                   "units": row_point[2],
                                   "object_type": row_point[3]})

        ################################################################################################################
        # Step 3: query associated points data
        ################################################################################################################
        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()

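        # for each point, read raw values from the historical table that matches its
        # object_type and convert the stored UTC timestamps back to local time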
        for point in point_list:
            point_values = []
            point_timestamps = []
            if point['object_type'] == 'ENERGY_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_energy_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()
                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        point_timestamps.append(current_datetime_local.isoformat()[0:19])
                        point_values.append(row[1])

            elif point['object_type'] == 'ANALOG_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_analog_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()
                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        point_timestamps.append(current_datetime_local.isoformat()[0:19])
                        point_values.append(row[1])
            elif point['object_type'] == 'DIGITAL_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_digital_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()
                if rows is not None and len(rows) > 0:
                    for row in rows:
                        current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset)
                        point_timestamps.append(current_datetime_local.isoformat()[0:19])
                        point_values.append(row[1])

            parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
            parameters_data['timestamps'].append(point_timestamps)
            parameters_data['values'].append(point_values)

        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()
        ################################################################################################################
        # Step 4: construct the report
        ################################################################################################################
        result = dict()
        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }
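        # names, timestamps and values are parallel lists:
        # names[i], timestamps[i] and values[i] all describe the same point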

        # export result to Excel file and then encode the file to base64 string
        if not is_quick_mode:
            result['excel_bytes_base64'] = \
                excelexporters.energystoragepowerstationreportingparameters.\
                export(result,
                       None,
                       reporting_period_start_datetime_local,
                       reporting_period_end_datetime_local,
                       language)
        resp.text = json.dumps(result)