import falcon
import mysql.connector
import simplejson as json
from datetime import timezone, timedelta
import re

import config
from core.useractivity import admin_control


class LogCollection:
    """
    Operation log collection resource.

    This resource provides read-only access to records stored in
    `myems_user_db.tbl_logs`, which are written by `useractivity.write_log`.
    """

    def __init__(self):
        pass

    @staticmethod
    def on_options(req, resp):
        """
        Handle OPTIONS request for CORS preflight.
        """
        _ = req
        resp.status = falcon.HTTP_200

    @staticmethod
    def on_get(req, resp):
        """
        Handle GET requests to retrieve operation logs.

        Optional query parameters:
        - limit: maximum number of records to return (default 100, max 1000)
        """
        admin_control(req)

        # parse and clamp limit parameter
        limit = req.get_param_as_int('limit') or 100
        if limit <= 0:
            limit = 100
        if limit > 1000:
            limit = 1000

        cnx = mysql.connector.connect(**config.myems_user_db)
        cursor = cnx.cursor(dictionary=True)

        query = (" SELECT l.id, l.user_uuid, u.name, u.display_name, "
                 " l.request_datetime_utc, l.request_method, l.resource_type, "
                 " l.resource_id, l.request_body "
                 " FROM tbl_logs l "
                 " LEFT JOIN tbl_users u ON l.user_uuid = u.uuid "
                 " ORDER BY l.request_datetime_utc DESC "
                 " LIMIT %s ")
        cursor.execute(query, (limit,))
        rows = cursor.fetchall()

        result = []

        # Parse time zone offset using regex for robustness
        match = re.match(r'([+-]?)(\d{2}):?(\d{2})', config.utc_offset)
        if match:
            sign, hours, minutes = match.groups()
            # Default to + if no sign provided
            if sign == '':
                sign = '+'
            offset = timedelta(hours=int(hours), minutes=int(minutes))
            if sign == '-':
                offset = -offset
            timezone_offset = offset.total_seconds() / 60
        else:
            # Fall back to an offset of 0 (UTC) when the configured offset is
            # malformed, rather than raising an error and failing the request
            print(f"Invalid UTC offset format: {config.utc_offset}")
            timezone_offset = 0

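        # For example (assumption: the deployment sets config.utc_offset to '+08:00'),
        # the regex captures sign='+', hours='08', minutes='00', so timezone_offset
        # becomes 480 and the local timestamps below are shifted eight hours ahead of UTC.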
        for row in rows:
            # row['request_datetime_utc'] is in UTC
            if row['request_datetime_utc'] is not None:
                local_dt = (row['request_datetime_utc'].replace(tzinfo=timezone.utc) +
                            timedelta(minutes=timezone_offset)).isoformat()[0:19]
                utc_dt = row['request_datetime_utc'].isoformat()[0:19]
            else:
                local_dt = None
                utc_dt = None

            result.append({
                "id": row['id'],
                "user_uuid": row['user_uuid'],
                "user_name": row['name'],
                "user_display_name": row['display_name'],
                "request_datetime": local_dt,
                "request_datetime_utc": utc_dt,
                "request_method": row['request_method'],
                "resource_type": row['resource_type'],
                "resource_id": row['resource_id'],
                "request_body": row['request_body'],
            })

        cursor.close()
        cnx.close()

        resp.text = json.dumps(result)
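

# A minimal usage sketch, not part of the resource itself. Assumptions: the
# '/logs' route path and port 8000 are illustrative only; the real application
# registers this resource elsewhere, and requests must still carry credentials
# that satisfy admin_control(). This block only runs when the module is
# executed directly.
if __name__ == '__main__':
    from wsgiref.simple_server import make_server

    app = falcon.App()
    app.add_route('/logs', LogCollection())

    # e.g. GET http://localhost:8000/logs?limit=50 would return the fifty most
    # recent operation log records as a JSON array (given valid admin credentials).
    with make_server('', 8000, app) as httpd:
        print('Serving operation logs on http://localhost:8000/logs ...')
        httpd.serve_forever()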