from pyramid.events import subscriber
from pyramid.events import BeforeRender
from pyramid.httpexceptions import HTTPFound
from pyramid.renderers import render
from pyramid.renderers import render_to_response
from pyramid.response import Response
from pyramid.response import FileResponse
from pyramid.view import view_config, forbidden_view_config

import datetime

from sqlalchemy.sql import func
from sqlalchemy.exc import DBAPIError
from sqlalchemy.orm.exc import NoResultFound

# The wildcard model imports are expected to provide DBSession, used below.
from .models import *
from .models.model import *
from .models import user as __user
from .models.user import User
from .models.item import Item
from .models.box import Box
from .models.box_item import BoxItem
from .models.transaction import Transaction, Deposit, CashDeposit, BTCDeposit
from .models.transaction import PurchaseLineItem, SubTransaction
from .models.account import Account, VirtualAccount, CashAccount
from .models.event import Event
from .models import event as __event
from .models.vendor import Vendor
from .models.item_vendor import ItemVendor
from .models.request import Request
from .models.announcement import Announcement
from .models.btcdeposit import BtcPendingDeposit
from .models.receipt import Receipt

from pyramid.security import Allow, Everyone, remember, forget

import chezbetty.datalayer as datalayer
from .btc import Bitcoin, BTCException

# Used for generating barcodes
from reportlab.graphics.barcode import code39
from reportlab.graphics.barcode import code93
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import mm, inch
from reportlab.pdfgen import canvas

from . import utility
import arrow

class InvalidMetric(Exception):
    pass

# fix_timezone
def ftz(i):
    return i
    #if type(i) is datetime.date:
    #    i = datetime.datetime(i.year, i.month, i.day)
    #return pytz.timezone('America/Detroit').localize(i).astimezone(tz=pytz.timezone('UTC'))


def get_start(days):
    if days:
        # "now" is really midnight tonight, so we really want tomorrow's date.
        # This makes the comparisons and math work out so that 1 day means today.
        now = arrow.utcnow() + datetime.timedelta(days=1)
        delta = datetime.timedelta(days=days)
        return now - delta
    else:
        # Hard-code when Betty started
        return arrow.get(datetime.date(year=2014, month=7, day=8))

def get_end():
    return arrow.utcnow() + datetime.timedelta(days=1)
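
# Examples (illustrative):
#   get_start(7) -> an Arrow ~7 days back from "tomorrow" (utcnow + 1 day)
#   get_start(0) -> arrow.get(2014-07-08), the hard-coded opening date
#   get_end()    -> "tomorrow" (utcnow + 1 day), so today's data is included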


def create_x_y_from_group(group, start, end, period, process_output=lambda x: x, default=0):
    x = []
    y = []

    if period == 'year':
        dt = datetime.timedelta(days=365)
        fmt_str = '{}'
    elif period == 'month':
        dt = datetime.timedelta(days=30)
        fmt_str = '{}-{:02}'
    elif period == 'day':
        dt = datetime.timedelta(days=1)
        fmt_str = '{}-{:02}-{:02}'

    # Apparently this is a copy operation
    if start == datetime.date.min:
        ptr = group[0][0]
    else:
        ptr = start

    for d,total in group:
        # Fill in days with no data
        while ptr < arrow.get(datetime.date(d.year, d.month, d.day)):
            x.append(fmt_str.format(ptr.year, ptr.month, ptr.day))
            y.append(default)
            ptr += dt

        x.append(fmt_str.format(d.year, d.month, d.day))
        y.append(process_output(total))

        ptr += dt

    # Fill in the end
    while ptr < end:
        x.append(fmt_str.format(ptr.year, ptr.month, ptr.day))
        y.append(default)
        ptr += dt
    return x,y
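
# Example (illustrative), assuming rows of (date, total) as produced by the
# *_by_period queries:
#   group = [(datetime.date(2014, 7, 8), 3), (datetime.date(2014, 7, 10), 5)]
#   start = arrow.get(datetime.date(2014, 7, 8))
#   end   = arrow.get(datetime.date(2014, 7, 11))
#   create_x_y_from_group(group, start, end, 'day')
#   # -> (['2014-07-08', '2014-07-09', '2014-07-10'], [3, 0, 5])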

def datetime_to_timestamps(data, process_output=lambda x: x):
    out = []
    for d in data:
        # NOTE: this relies on the pre-1.0 arrow API where .timestamp is a
        # property (seconds); on arrow >= 1.0 it would need to be .timestamp().
        t = arrow.get(
                datetime.datetime(
                    year=d[0].year,
                    month=d[0].month,
                    day=d[0].day,
                    hour=12,
                )
            ).timestamp * 1000
        #t = round(datetime.datetime(year=d[0].year, month=d[0].month, day=d[0].day, hour=12)\
        #        .replace(tzinfo=datetime.timezone.utc).timestamp()*1000)
        # t = round(datetime.datetime.combine(d[0], datetime.datetime.min.time())\
        #         .replace(tzinfo=datetime.timezone.utc).timestamp()*1000)
        out.append((t, process_output(d[1])))
    return out
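
# Example (illustrative): a row (datetime.date(2014, 7, 8), 3) becomes
# (1404820800000, 3), i.e. the value paired with the epoch-millisecond
# timestamp for noon UTC on that date, which is the x-axis format the
# Highcharts views below expect.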


# Get x,y for some data metric
#
# start:  arrow date that all data must be at or after
# end:    arrow date that all data must be before
# metric: 'items', 'sales', 'deposits', 'deposits_cash', or 'deposits_btc'
# period: 'day', 'month', or 'year'
def admin_data_period_range(start, end, metric, period):
    if metric == 'items':
        data = PurchaseLineItem.quantity_by_period(period, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period))
    elif metric == 'sales':
        data = PurchaseLineItem.virtual_revenue_by_period(period, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    elif metric == 'deposits':
        data = Deposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    elif metric == 'deposits_cash':
        data = CashDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    elif metric == 'deposits_btc':
        data = BTCDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    else:
        raise(InvalidMetric(metric))


def admin_data_period(num_days, metric, period):
    return admin_data_period_range(get_start(num_days), get_end(), metric, period)
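
# Example (illustrative): admin_data_period(30, 'sales', 'day') groups the last
# 30 days of revenue by calendar day, while admin_data_period(0, 'items', 'month')
# groups item counts by month over all time (back to the opening date).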

def admin_data_highcharts_period(metric, period):
    start = get_start(0)
    end = get_end()
    if metric == 'items':
        data = PurchaseLineItem.quantity_by_period(period, start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data)
    elif metric == 'sales':
        data = PurchaseLineItem.virtual_revenue_by_period(period, start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    elif metric == 'deposits':
        data = Deposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    elif metric == 'deposits_cash':
        data = CashDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    elif metric == 'deposits_btc':
        data = BTCDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    else:
        raise(InvalidMetric(metric))


###
### "Each" functions. So "monday", "tuesday", etc. instead of 2014-07-21
###

month_each_mapping = [(i, datetime.date(2000,i,1).strftime('%B')) for i in range(1,13)]

day_each_mapping = [(i, '{:02}'.format(i)) for i in range(0,31)]

weekday_each_mapping = [(6, 'Sunday'), (0, 'Monday'), (1, 'Tuesday'),
                        (2, 'Wednesday'), (3, 'Thursday'), (4, 'Friday'),
                        (5, 'Saturday')]

hour_each_mapping = [(i, '{0:02}:00-{0:02}:59'.format(i)) for i in range(0,24)]
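
# Illustrative contents of the mappings above:
#   month_each_mapping -> [(1, 'January'), (2, 'February'), ..., (12, 'December')]
#   hour_each_mapping  -> [(0, '00:00-00:59'), (1, '01:00-01:59'), ..., (23, '23:00-23:59')]
# The first element of each pair is the group key looked up in the query result;
# the second is the x-axis label.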


def create_x_y_from_group_each(group, mapping, start, end, process_output=lambda x: x, default=0):
    x = []
    y = []

    for d in mapping:
        # Put the x axis label in the x array
        x.append(d[1])

        if d[0] in group:
            # We have a reading for this particular time unit
            y.append(process_output(group[d[0]]))
        else:
            y.append(default)

    return x,y
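
# Example (illustrative), assuming group is dict-like, keyed by the time unit
# (start and end are currently unused here):
#   group = {0: 10, 4: 20}   # e.g. 10 sold on Mondays, 20 on Fridays
#   create_x_y_from_group_each(group, weekday_each_mapping, start, end)
#   # -> (['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'],
#   #     [0, 10, 0, 0, 0, 20, 0])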


# Get data about each something. So each weekday, or each hour
#
# metric: 'items', 'sales', or 'deposits'
# each:   'month_each', 'day_each', 'weekday_each', or 'hour_each'
def admin_data_each_range(start, end, metric, each):
    if each == 'month_each':
        mapping = month_each_mapping
    elif each == 'day_each':
        mapping = day_each_mapping
    elif each == 'weekday_each':
        mapping = weekday_each_mapping
    elif each == 'hour_each':
        mapping = hour_each_mapping

    if metric == 'items':
        data = PurchaseLineItem.quantity_by_period(each, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group_each(data, mapping, start, end))
    elif metric == 'sales':
        data = PurchaseLineItem.virtual_revenue_by_period(each, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group_each(data, mapping, start, end, float, 0.0))
    elif metric == 'deposits':
        data = Deposit.deposits_by_period(each, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group_each(data, mapping, start, end, float, 0.0))
    else:
        raise(InvalidMetric(metric))


def admin_data_each(num_days, metric, each):
    return admin_data_each_range(get_start(num_days), get_end(), metric, each)



def create_json(request, metric, period):
    try:
        if 'days' in request.GET:
            num_days = int(request.GET['days'])
        else:
            num_days = 0
        if 'each' in period:
            x,y = admin_data_each(num_days, metric, period)
        else:
            x,y = admin_data_period(num_days, metric, period)
        return {'x': x,
                'y': y,
                'num_days': num_days or 'all'}
    except ValueError:
        return {'status': 'error'}
    except utility.InvalidGroupPeriod as e:
        return {'status': 'error',
                'message': 'Invalid period for grouping data: {}'.format(e)}
    except InvalidMetric as e:
        return {'status': 'error',
                'message': 'Invalid metric for requesting data: {}'.format(e)}
    except Exception as e:
        if request.debug: raise(e)
        return {'status': 'error'}
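
# Illustrative responses from create_json():
#   success: {'x': ..., 'y': ..., 'num_days': 30}   (num_days is 'all' when days is 0/absent)
#   failure: {'status': 'error', 'message': '...'}  ('message' only for recognized errors)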


def create_highcharts_json(request, metric, period):
    try:
        return admin_data_highcharts_period(metric, period)
    except ValueError:
        return {'status': 'error'}
    except utility.InvalidGroupPeriod as e:
        return {'status': 'error',
                'message': 'Invalid period for grouping data: {}'.format(e)}
    except InvalidMetric as e:
        return {'status': 'error',
                'message': 'Invalid metric for requesting data: {}'.format(e)}
    except Exception as e:
        if request.debug: raise(e)
        return {'status': 'error'}

def create_dict_to_date(metric, period):
    now = datetime.date.today()

    if period == 'month':
        start = arrow.get(datetime.date(now.year, now.month, 1))
    elif period == 'year':
        start = arrow.get(datetime.date(now.year, 1, 1))

    xs,ys = admin_data_period_range(start, get_end(), metric, period)

    return {'xs': xs,
            'ys': ys}


def create_dict(metric, period, num_days):
    if 'each' in period:
        xs,ys = admin_data_each(num_days, metric, period)
    else:
        xs,ys = admin_data_period(num_days, metric, period)

    return {'xs': xs,
            'ys': ys,
            'avg': [sum(y)/len(y) for y in ys],
            'avg_hack': [[sum(y)/len(y)]*len(y) for y in ys],
            'num_days': num_days or 'all'}
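
# Note (illustrative): 'avg' holds one average per y-series, and 'avg_hack'
# repeats that average len(y) times so it can be plotted as a flat line
# alongside the series itself.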


# Get a list of timestamps and the number of a particular item that was sold
# at that time.
def create_item_sales_json(request, item_id):
    sales = PurchaseLineItem.item_sale_quantities(item_id)

    individual = []
    totals = []
    total = 0
    for s in sales:
        # epoch milliseconds (relies on the pre-1.0 arrow .timestamp property)
        tstamp = s[1].timestamp.timestamp*1000
        individual.append((tstamp, s[0].quantity))
        total += s[0].quantity
        totals.append((tstamp, total))

    return {'individual': individual,
            'sum': totals}
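
# Illustrative output: 'individual' is a list of (ms_timestamp, quantity) pairs,
# one per purchase of the item, and 'sum' pairs the same timestamps with a
# running total, e.g. individual [(t0, 2), (t1, 1)] -> sum [(t0, 2), (t1, 3)].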

#######
### Calculate the speed of sale for all items

# We are going to do this over all time and over the last 30 days

# Returns a dict of {item_id -> sale speed (items sold per day on sale)},
# or a single rate if only_item_id is given
def item_sale_speed(num_days, only_item_id=None):
    # TODO: If we're only looking for one item (only_item_id), this can probably
    # be made more efficient

    # First we need to figure out when each item was in stock and when it wasn't.
    # I don't know what the best way to do this is. I think the easiest way is
    # to look at the in_stock column in the item_history table and figure it
    # out from there.

    # Start by getting all item change events for the last thirty days
    data = {}

    data_onsale = {}

    start = get_start(num_days)
    start_datetime = arrow.get(datetime.datetime(start.year, start.month, start.day))

    start_padding = get_start(num_days*3)
    start_str = start_padding.strftime('%Y-%m-%d 0:0')
    # This gets a little hairy b/c we circumvent sqlalchemy here. This means
    # that timestamps aren't automatically converted into arrow objects, so we
    # have to do it ourselves everywhere we access them
    items = DBSession.execute("SELECT * FROM items_history\
                               WHERE item_changed_at>'{}'\
                               ORDER BY item_changed_at ASC".format(start_str))

    # Calculate the number of days in the interval the item was in stock
    for item in items:
        status = item.in_stock>0

        item_changed_at = arrow.get(item.item_changed_at)

        if item.id not in data_onsale:
            data_onsale[item.id] = {'days_on_sale': 0,
                                    'date_in_stock': None,
                                    'num_sold': 0}

        if item_changed_at < start_datetime:
            # We need to figure out if the item started in stock at the
            # beginning of the time period.
            if status == True:
                data_onsale[item.id]['date_in_stock'] = start_datetime
            else:
                data_onsale[item.id]['date_in_stock'] = None

        elif (status == True) and (data_onsale[item.id]['date_in_stock'] == None):
            # item is in stock now and wasn't before
            data_onsale[item.id]['date_in_stock'] = item_changed_at

        elif (status == False) and (data_onsale[item.id]['date_in_stock'] != None):
            # Item is now out of stock

            # calculate time difference
            tdelta = item_changed_at - data_onsale[item.id]['date_in_stock']
            data_onsale[item.id]['days_on_sale'] += tdelta.days
            #print('{}: {}'.format(item.id, tdelta))

            data_onsale[item.id]['date_in_stock'] = None

    for item_id,item_data in data_onsale.items():
        if item_data['date_in_stock'] != None:
            tdelta = arrow.now() - item_data['date_in_stock']
            item_data['days_on_sale'] += tdelta.days
            #print('{}: {}'.format(item_id, tdelta.days))


    # Calculate the number of items sold during the period
    purchases = DBSession.query(PurchaseLineItem)\
                         .join(Transaction)\
                         .join(Event)\
                         .filter(Event.deleted==False)\
                         .filter(Event.timestamp>start)
    for purchase in purchases:
        item_id = purchase.item_id
        quantity = purchase.quantity
        if item_id not in data_onsale:
            # Defensive: an item with no items_history rows in the padded
            # window would otherwise raise a KeyError here; its rate will
            # simply come out as 0 below.
            data_onsale[item_id] = {'days_on_sale': 0,
                                    'date_in_stock': None,
                                    'num_sold': 0}
        data_onsale[item_id]['num_sold'] += quantity


    # Calculate rate, finally
    for itemid,item_data in data_onsale.items():
        if item_data['days_on_sale'] == 0:
            data[itemid] = 0
            continue
        data[itemid] = item_data['num_sold'] / item_data['days_on_sale']

    if only_item_id:
        if only_item_id in data:
            return data[only_item_id]
        else:
            return 0
    else:
        return data
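
# Usage (illustrative):
#   item_sale_speed(30)                  -> {item_id: items sold per day on sale}
#   item_sale_speed(30, only_item_id=4)  -> a single rate (0 if the item has no
#                                           data in the window)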



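# The JSON views below assume routes with the matching names
# ('admin_data_items_json', 'admin_data_sales_json', ...) are registered in the
# application's route configuration, and that the matched URL supplies the
# 'period', 'metric', 'item_id', or 'user_id' values read from request.matchdict.
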
@view_config(route_name='admin_data_items_json',
             renderer='json',
             permission='manage')
def admin_data_items_json(request):
    return create_json(request, 'items', request.matchdict['period'])


@view_config(route_name='admin_data_sales_json',
             renderer='json',
             permission='manage')
def admin_data_sales_json(request):
    return create_json(request, 'sales', request.matchdict['period'])


@view_config(route_name='admin_data_json_highcharts',
             renderer='json',
             permission='manage')
def admin_data_json_highcharts(request):
    return create_highcharts_json(request, request.matchdict['metric'], request.matchdict['period'])


@view_config(route_name='admin_data_deposits_json',
             renderer='json',
             permission='manage')
def admin_data_deposits_json(request):
    return create_json(request, 'deposits', request.matchdict['period'])


@view_config(route_name='admin_data_items_each_json',
             renderer='json',
             permission='manage')
def admin_data_items_each_json(request):
    return create_json(request, 'items', request.matchdict['period']+'_each')


@view_config(route_name='admin_data_sales_each_json',
             renderer='json',
             permission='manage')
def admin_data_sales_each_json(request):
    return create_json(request, 'sales', request.matchdict['period']+'_each')


@view_config(route_name='admin_data_deposits_each_json',
             renderer='json',
             permission='manage')
def admin_data_deposits_each_json(request):
    return create_json(request, 'deposits', request.matchdict['period']+'_each')


# All of the sale dates and quantities of a particular item
@view_config(route_name='admin_data_item_sales_json',
             renderer='json',
             permission='manage')
def admin_data_item_sales_json(request):
    return create_item_sales_json(request, request.matchdict['item_id'])


# Timestamps and the number of total users
@view_config(route_name='admin_data_users_totals_json',
             renderer='json',
             permission='manage')
def admin_data_users_totals_json(request):
    return User.get_user_count_cumulative()


# Timestamps and user debt, bank balance, debt/# users in debt
@view_config(route_name='admin_data_users_balance_totals_json',
             renderer='json',
             permission='manage')
def admin_data_users_balance_totals_json(request):
    return Transaction.get_balance_total_daily()


# Timestamps and balance for a specific user over time
@view_config(route_name='admin_data_user_balance_json',
             renderer='json',
             permission='manage')
def admin_data_user_balance_json(request):
    user = User.from_id(request.matchdict['user_id'])
    return Transaction.get_balances_over_time_for_user(user)


# # Timestamps and user debt, "bank balance", debt/user
# @view_config(route_name='admin_data_users_balance_totals_percapita_json',
#              renderer='json',
#              permission='manage')
# def admin_data_users_balance_totals_percapita_json(request):
#     debt = Transaction.get_balance_total_daily()
#     users = User.get_user_count_cumulative()

#     di = 0
#     ui = 0
#     next_user_time = users[ui][0]
#     user_count = users[ui][1]
#     out = []

#     for rec in debt:
#         timestamp = rec[0]
#         debt = rec[1]
#         balance = rec[2]

#         # Look for the correct number of users
#         while timestamp > next_user_time:
#             ui += 1
#             if ui >= len(users):
#                 break
#             next_user_time = users[ui][0]
#             user_count = users[ui][1]

#         debt_per_capita = debt/user_count

#         out.append((timestamp, debt, balance, debt_per_capita))

#         return out


@view_config(route_name='admin_data_speed_items',
             renderer='json',
             permission='manage')
def admin_data_speed_items(request):
    return item_sale_speed(30)