import re
from datetime import datetime, timedelta, timezone
import falcon
import mysql.connector
import simplejson as json
import config
import excelexporters.energystoragepowerstationreportingparameters
from core import utilities
from core.useractivity import access_control, api_key_control


class Reporting:
    def __init__(self):
        """Initializes Class"""
        pass

    @staticmethod
    def on_options(req, resp):
        _ = req
        resp.status = falcon.HTTP_200

    ####################################################################################################################
    # PROCEDURES
    # Step 1: validate parameters
    # Step 2: query associated points
    # Step 3: query associated points data
    # Step 4: construct the report
    ####################################################################################################################
    @staticmethod
    def on_get(req, resp):
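        # authenticate the request: use session-based access control when no usable
        # API-KEY header is present, otherwise validate the API key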
        if 'API-KEY' not in req.headers or \
                not isinstance(req.headers['API-KEY'], str) or \
                len(str.strip(req.headers['API-KEY'])) == 0:
            access_control(req)
        else:
            api_key_control(req)
        print(req.params)
        # this procedure accepts a comma-separated list of point ids to identify the
        # points associated with an energy storage power station
        point_ids = req.params.get('pointids')
        reporting_period_start_datetime_local = req.params.get('reportingperiodstartdatetime')
        reporting_period_end_datetime_local = req.params.get('reportingperiodenddatetime')
        language = req.params.get('language')
        quick_mode = req.params.get('quickmode')

        ################################################################################################################
        # Step 1: validate parameters
        ################################################################################################################
        # reject a missing or empty 'pointids' parameter
        # (the 'API.INVALID_POINT_ID' error code is assumed to follow the API's convention)
        if point_ids is None or len(str.strip(point_ids)) == 0:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_POINT_ID')
        point_id_list = str.strip(point_ids).split(',')

        timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
        if config.utc_offset[0] == '-':
            timezone_offset = -timezone_offset
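        # e.g. config.utc_offset = '+08:00' gives 8 * 60 + 0 = 480 minutes;
        # a leading '-' in the offset string makes the value negative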

        if reporting_period_start_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
        else:
            reporting_period_start_datetime_local = str.strip(reporting_period_start_datetime_local)
            try:
                reporting_start_datetime_utc = datetime.strptime(reporting_period_start_datetime_local,
                                                                 '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
            reporting_start_datetime_utc = \
                reporting_start_datetime_utc.replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
            # normalize the start datetime
            if config.minutes_to_count == 30 and reporting_start_datetime_utc.minute >= 30:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=30, second=0, microsecond=0)
            else:
                reporting_start_datetime_utc = reporting_start_datetime_utc.replace(minute=0, second=0, microsecond=0)
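            # with 30-minute aggregation the start is rounded down to :00 or :30;
            # otherwise it is rounded down to the top of the hour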

        if reporting_period_end_datetime_local is None:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
        else:
            reporting_period_end_datetime_local = str.strip(reporting_period_end_datetime_local)
            try:
                reporting_end_datetime_utc = datetime.strptime(reporting_period_end_datetime_local,
                                                               '%Y-%m-%dT%H:%M:%S')
            except ValueError:
                raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                       description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
            reporting_end_datetime_utc = reporting_end_datetime_utc.replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)

        if reporting_start_datetime_utc >= reporting_end_datetime_utc:
            raise falcon.HTTPError(status=falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

        # if quick mode is turned on, skip exporting the report to an excel file
        is_quick_mode = False
        if quick_mode is not None and \
                len(str.strip(quick_mode)) > 0 and \
                str.lower(str.strip(quick_mode)) in ('true', 't', 'on', 'yes', 'y'):
            is_quick_mode = True

        trans = utilities.get_translation(language)
        trans.install()
        _ = trans.gettext
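        # install the gettext translator for the requested language and bind it to _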

        ################################################################################################################
        # Step 2: query associated points
        ################################################################################################################
        cnx_system = mysql.connector.connect(**config.myems_system_db)
        cursor_system = cnx_system.cursor()

        cnx_historical = mysql.connector.connect(**config.myems_historical_db)
        cursor_historical = cnx_historical.cursor()

        point_list = list()
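        # look up metadata for the requested points; the ids are interpolated directly
        # into the IN (...) clause, so they are expected to be numeric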
        cursor_system.execute(" SELECT id, name, units, object_type "
                              " FROM tbl_points "
                              " WHERE id IN ( " + ', '.join(map(str, point_id_list)) + ") ", ())
        rows_points = cursor_system.fetchall()
        if rows_points is not None and len(rows_points) > 0:
            for row_point in rows_points:
                point_list.append({"id": row_point[0],
                                   "name": row_point[1],
                                   "units": row_point[2],
                                   "object_type": row_point[3]})

        ################################################################################################################
        # Step 3: query associated points data
        ################################################################################################################
        parameters_data = dict()
        parameters_data['names'] = list()
        parameters_data['timestamps'] = list()
        parameters_data['values'] = list()

        for point in point_list:
            point_values = []
            point_timestamps = []
            if point['object_type'] == 'ENERGY_VALUE':
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_energy_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    reporting_start_datetime_local = reporting_start_datetime_utc.replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime_local = reporting_start_datetime_local
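                    # forward-fill at one-minute resolution: repeat the first reading up to
                    # its own timestamp, then repeat each reading until the next one arrives,
                    # rendering timestamps in local time as 'MM-DDTHH:MM'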

                    while current_datetime_local < rows[0][0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset):
                        point_timestamps.append(current_datetime_local.isoformat()[5:16])
                        point_values.append(rows[0][1])
                        current_datetime_local += timedelta(minutes=1)

                    for index in range(len(rows) - 1):
                        while current_datetime_local < rows[index + 1][0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset):
                            point_timestamps.append(current_datetime_local.isoformat()[5:16])
                            point_values.append(rows[index][1])
                            current_datetime_local += timedelta(minutes=1)
            elif point['object_type'] == 'ANALOG_VALUE':
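                # analog values are resampled with the same forward-fill as energy values above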
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_analog_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    reporting_start_datetime_local = reporting_start_datetime_utc.replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime_local = reporting_start_datetime_local

                    while current_datetime_local < rows[0][0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset):
                        point_timestamps.append(current_datetime_local.isoformat()[5:16])
                        point_values.append(rows[0][1])
                        current_datetime_local += timedelta(minutes=1)

                    for index in range(len(rows) - 1):
                        while current_datetime_local < rows[index + 1][0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset):
                            point_timestamps.append(current_datetime_local.isoformat()[5:16])
                            point_values.append(rows[index][1])
                            current_datetime_local += timedelta(minutes=1)
            elif point['object_type'] == 'DIGITAL_VALUE':
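                # digital values are resampled with the same forward-fill as energy values above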
                query = (" SELECT utc_date_time, actual_value "
                         " FROM tbl_digital_value "
                         " WHERE point_id = %s "
                         " AND utc_date_time BETWEEN %s AND %s "
                         " ORDER BY utc_date_time ")
                cursor_historical.execute(query, (point['id'],
                                                  reporting_start_datetime_utc,
                                                  reporting_end_datetime_utc))
                rows = cursor_historical.fetchall()

                if rows is not None and len(rows) > 0:
                    reporting_start_datetime_local = reporting_start_datetime_utc.replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime_local = reporting_start_datetime_local

                    while current_datetime_local < rows[0][0].replace(tzinfo=timezone.utc) + \
                            timedelta(minutes=timezone_offset):
                        point_timestamps.append(current_datetime_local.isoformat()[5:16])
                        point_values.append(rows[0][1])
                        current_datetime_local += timedelta(minutes=1)

                    for index in range(len(rows) - 1):
                        while current_datetime_local < rows[index + 1][0].replace(tzinfo=timezone.utc) + \
                                timedelta(minutes=timezone_offset):
                            point_timestamps.append(current_datetime_local.isoformat()[5:16])
                            point_values.append(rows[index][1])
                            current_datetime_local += timedelta(minutes=1)

            parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
            parameters_data['timestamps'].append(point_timestamps)
            parameters_data['values'].append(point_values)

        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()

        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()
        ################################################################################################################
        # Step 4: construct the report
        ################################################################################################################
        result = dict()
        result['parameters'] = {
            "names": parameters_data['names'],
            "timestamps": parameters_data['timestamps'],
            "values": parameters_data['values']
        }

        # export the result to an excel file and encode the file as a base64 string
        if not is_quick_mode:
            result['excel_bytes_base64'] = \
                excelexporters.energystoragepowerstationreportingparameters.\
                export(result,
                       None,
                       reporting_period_start_datetime_local,
                       reporting_period_end_datetime_local,
                       language)
        resp.text = json.dumps(result)
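
# illustrative request (the route path is registered elsewhere and is not shown here):
#   GET .../energystoragepowerstationreportingparameters?pointids=1,2,3
#       &reportingperiodstartdatetime=2024-01-01T00:00:00
#       &reportingperiodenddatetime=2024-01-02T00:00:00
#       &language=en&quickmode=false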