from logging import getLogger

from django.conf import settings
from django.core.paginator import Paginator
from django.http import HttpResponseNotFound
from django.shortcuts import render
try:
    from django.utils.datastructures import SortedDict as OrderedDict
except ImportError:
    from collections import OrderedDict

from django.utils.functional import curry
from redis.exceptions import ResponseError

from .utils import LazySlicingIterable
from .utils import PY3

logger = getLogger(__name__)

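# Page size for paginating list/zset values on the key inspection page. It can
# be overridden from the project's settings module, e.g.
# ``REDISBOARD_ITEMS_PER_PAGE = 50`` (an illustrative value; the default is 100).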
REDISBOARD_ITEMS_PER_PAGE = getattr(settings, 'REDISBOARD_ITEMS_PER_PAGE', 100)


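# Small parsing helpers: ``safeint`` coerces numeric strings to int (leaving
# anything else untouched) and ``_fixup_pair`` applies it to the value half of
# a ``field:value`` pair, as produced when DEBUG OBJECT output is parsed below.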
def safeint(value):
    try:
        return int(value)
    except ValueError:
        return value


def _fixup_pair(pair):
    a, b = pair
    return a, safeint(b)


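# Maps the (byte string) result of the TYPE command to a callable issuing the
# matching length command: LLEN, STRLEN, SCARD, ZCOUNT -inf/+inf or HLEN.
# The callables also accept a pipeline in place of a plain connection.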
LENGTH_GETTERS = {
    b'list': lambda conn, key: conn.llen(key),
    b'string': lambda conn, key: conn.strlen(key),
    b'set': lambda conn, key: conn.scard(key),
    b'zset': lambda conn, key: conn.zcount(key, '-inf', '+inf'),
    b'hash': lambda conn, key: conn.hlen(key),
}


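# Collects metadata for a single key in one pipeline round trip (OBJECT
# REFCOUNT/ENCODING/IDLETIME, a type-specific length command and TTL). On
# ResponseError the failure is logged and "n/a" placeholders plus the error
# message are returned, so one problematic key cannot break the whole page.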
def _get_key_info(conn, key):
    try:
        obj_type = conn.type(key)
        pipe = conn.pipeline()
        try:
            pipe.object('REFCOUNT', key)
            pipe.object('ENCODING', key)
            pipe.object('IDLETIME', key)
            LENGTH_GETTERS[obj_type](pipe, key)
            pipe.ttl(key)

            refcount, encoding, idletime, obj_length, obj_ttl = pipe.execute()
        except ResponseError as exc:
            logger.exception("Failed to get object info for key %r: %s", key, exc)
            return {
                'type': obj_type,
                'name': key,
                'length': "n/a",
                'error': str(exc),
                'ttl': "n/a",
                'refcount': "n/a",
                'encoding': "n/a",
                'idletime': "n/a",
            }
        return {
            'type': obj_type,
            'name': key,
            'length': obj_length,
            'ttl': obj_ttl,
            'refcount': refcount,
            'encoding': encoding,
            'idletime': idletime,
        }
    except ResponseError as exc:
        logger.exception("Failed to get details for key %r: %s", key, exc)
        return {
            'type': "n/a",
            'length': "n/a",
            'name': key,
            'error': str(exc),
            'ttl': "n/a",
            'refcount': "n/a",
            'encoding': "n/a",
            'idletime': "n/a",
        }

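# Maps a key type to a callable returning (index, value) pairs for display.
# The list/zset getters take start/end offsets so they can be sliced by the
# paginator; the other types always return the full value. The 'n/a' entry
# matches the placeholder type that _get_key_info reports on errors.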
VALUE_GETTERS = {
    b'list': lambda conn, key, start=0, end=-1: [(pos + start, val)
                                                 for (pos, val) in enumerate(conn.lrange(key, start, end))],
    b'string': lambda conn, key, *args: [('string', conn.get(key))],
    b'set': lambda conn, key, *args: list(enumerate(conn.smembers(key))),
    b'zset': lambda conn, key, start=0, end=-1: [(pos + start, val)
                                                 for (pos, val) in enumerate(conn.zrange(key, start, end))],
    b'hash': lambda conn, key, *args: conn.hgetall(key).items(),
    'n/a': lambda conn, key, *args: (),
}


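# Selects the requested db, then builds the value listing for one key. For
# list/zset keys the values are wrapped in a Paginator over a
# LazySlicingIterable so only the requested page is fetched from Redis; all
# other types are loaded in full via VALUE_GETTERS.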
def _get_key_details(conn, db, key, page):
    conn.execute_command('SELECT', db)
    details = _get_key_info(conn, key)
    details['db'] = db
    if details['type'] in (b'list', b'zset'):
        details['data'] = Paginator(
            LazySlicingIterable(
                lambda: details['length'],
                curry(VALUE_GETTERS[details['type']], conn, key)
            ),
            REDISBOARD_ITEMS_PER_PAGE
        ).page(page)
    else:
        details['data'] = VALUE_GETTERS[details['type']](conn, key)

    return details

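# Estimates memory usage for one db without walking every key: RANDOMKEY is
# pipelined sampling_threshold times, DEBUG OBJECT serializedlength (plus the
# key name length) is read for each sampled key, and the per-key average is
# extrapolated to the full DBSIZE, split into volatile (TTL set) and
# persistent keys. DEBUG OBJECT may come back already parsed into a dict or
# as a raw byte string, hence the isinstance check and _fixup_pair parsing.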
def _raw_get_db_summary(server, db):
    server.connection.execute_command('SELECT', db)
    pipe = server.connection.pipeline()

    pipe.dbsize()
    for i in range(server.sampling_threshold):
        pipe.randomkey()

    results = pipe.execute()
    size = results.pop(0)
    keys = sorted(set(results))

    pipe = server.connection.pipeline()
    for key in keys:
        pipe.execute_command('DEBUG', 'OBJECT', key)
        pipe.ttl(key)

    total_memory = 0
    volatile_memory = 0
    persistent_memory = 0
    total_keys = 0
    volatile_keys = 0
    persistent_keys = 0
    results = pipe.execute()
    for key, details, ttl in zip(keys, results[::2], results[1::2]):
        if not isinstance(details, dict):
            details = dict(_fixup_pair(i.split(b':'))
                           for i in details.split() if b':' in i)

        length = details[b'serializedlength'] + len(key)

        if ttl:
            volatile_memory += length
            volatile_keys += 1
        else:
            persistent_memory += length
            persistent_keys += 1
        total_memory += length
        total_keys += 1

    if total_keys:
        total_memory = (total_memory / total_keys) * size
    else:
        total_memory = 0

    if persistent_keys:
        persistent_memory = (persistent_memory / persistent_keys) * size
    else:
        persistent_memory = 0

    if volatile_keys:
        volatile_memory = (volatile_memory / volatile_keys) * size
    else:
        volatile_memory = 0
    return dict(
        size=size,
        total_memory=total_memory,
        volatile_memory=volatile_memory,
        persistent_memory=persistent_memory,
    )

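# Fault-tolerant wrapper around _raw_get_db_summary: commands such as DEBUG
# OBJECT can be disabled or restricted on some Redis deployments, so any
# ResponseError is logged and an all-zero summary is returned instead.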
def _get_db_summary(server, db):
    try:
        return _raw_get_db_summary(server, db)
    except ResponseError as exc:
        logger.exception("Failed to get summary for db %r: %s", db, exc)
        return dict(
            size=0,
            total_memory=0,
            volatile_memory=0,
            persistent_memory=0,
        )


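# Per-key details for one db. Small databases (up to sampling_threshold keys)
# are enumerated with KEYS; larger ones are sampled via sampling_size calls to
# RANDOMKEY, and the result is flagged with ``sampling=True``.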
def _get_db_details(server, db):
    conn = server.connection
    conn.execute_command('SELECT', db)
    size = conn.dbsize()

    key_details = {}
    if size > server.sampling_threshold:
        sampling = True
        pipe = conn.pipeline()
        for _ in (range if PY3 else xrange)(server.sampling_size):  # flake8=noqa
            pipe.randomkey()

        for key in set(pipe.execute()):
            key_details[key] = _get_key_info(conn, key)

    else:
        sampling = False
        for key in conn.keys():
            key_details[key] = _get_key_info(conn, key)

    return dict(
        keys=key_details,
        sampling=sampling,
    )


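# The inspect view. When the server is UP it either shows a single key
# (?key=...&db=...&page=...), or builds per-db summaries from INFO's dbN
# entries; db details are expanded automatically when the combined size stays
# under sampling_threshold, otherwise only for an explicitly requested ?db=.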
def inspect(request, server):
    stats = server.stats
    conn = server.connection
    database_details = OrderedDict()
    key_details = None

    if stats['status'] == 'UP':
        if 'key' in request.GET:
            key = request.GET['key']
            db = request.GET.get('db', 0)
            page = request.GET.get('page', 1)
            key_details = _get_key_details(conn, db, key, page)
        else:
            databases = sorted(name[2:] for name in conn.info()
                               if name.startswith('db'))
            total_size = 0
            for db in databases:
                database_details[db] = summary = _get_db_summary(server, db)
                total_size += summary['size']
            if total_size < server.sampling_threshold:
                for db in databases:
                    database_details[db].update(
                        _get_db_details(server, db),
                        active=True,
                    )
            elif 'db' in request.GET:
                db = request.GET['db']
                if db in database_details:
                    database_details[db].update(
                        _get_db_details(server, db),
                        active=True,
                    )
                else:
                    return HttpResponseNotFound("Unknown database.")

    return render(request, "redisboard/inspect.html", {
        'databases': database_details,
        'key_details': key_details,
        'original': server,
        'stats': stats,
        'app_label': 'redisboard',
    })