from pyramid.events import subscriber
from pyramid.events import BeforeRender
from pyramid.httpexceptions import HTTPFound
from pyramid.renderers import render
from pyramid.renderers import render_to_response
from pyramid.response import Response
from pyramid.response import FileResponse
from pyramid.view import view_config, forbidden_view_config

from sqlalchemy.sql import func
from sqlalchemy.exc import DBAPIError
from sqlalchemy.orm.exc import NoResultFound

from .models import *
from .models.model import *
from .models import user as __user
from .models.user import User
from .models.item import Item
from .models.box import Box
from .models.box_item import BoxItem
from .models.transaction import Transaction, Deposit, CashDeposit, BTCDeposit
from .models.transaction import PurchaseLineItem, SubTransaction
from .models.account import Account, VirtualAccount, CashAccount
from .models.event import Event
from .models import event as __event
from .models.vendor import Vendor
from .models.item_vendor import ItemVendor
from .models.request import Request
from .models.announcement import Announcement
from .models.btcdeposit import BtcPendingDeposit
from .models.receipt import Receipt

from pyramid.security import Allow, Everyone, remember, forget

import chezbetty.datalayer as datalayer
from .btc import Bitcoin, BTCException

# Used for generating barcodes
from reportlab.graphics.barcode import code39
from reportlab.graphics.barcode import code93
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import mm, inch
from reportlab.pdfgen import canvas

from . import utility

# datetime and Decimal are used directly below (timedelta/date math and the
# Decimal(0) comparison in user_balance_histogram), so import them explicitly.
import datetime
from decimal import Decimal

import arrow

class InvalidMetric(Exception):
    pass

# fix_timezone
def ftz(i):
    return i
    #if type(i) is datetime.date:
    #    i = datetime.datetime(i.year, i.month, i.day)
    #return pytz.timezone('America/Detroit').localize(i).astimezone(tz=pytz.timezone('UTC'))


def get_start(days):
    if days:
        # "now" is really midnight tonight, so we really want tomorrow's date.
        # That makes the comparisons and the math work out so that days=1 means today.
        now = arrow.utcnow() + datetime.timedelta(days=1)
        delta = datetime.timedelta(days=days)
        return now - delta
    else:
        # Hard code in when Betty started
        return arrow.get(datetime.date(year=2014, month=7, day=8))

def get_end():
    return arrow.utcnow() + datetime.timedelta(days=1)

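# Note (added commentary): get_start(0) falls back to the hard-coded start date
# above, while get_start(n) returns (utcnow + 1 day) - n days so that n=1 still
# covers today; get_end() is likewise "tomorrow", which keeps today's data
# inside the [start, end) window.
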
def create_x_y_from_group(group, start, end, period, process_output=lambda x: x, default=0):
    x = []
    y = []

    if period == 'year':
        dt = datetime.timedelta(days=365)
        fmt_str = '{}'
    elif period == 'month':
        dt = datetime.timedelta(days=30)
        fmt_str = '{}-{:02}'
    elif period == 'day':
        dt = datetime.timedelta(days=1)
        fmt_str = '{}-{:02}-{:02}'

    # This assignment just gives us a pointer we can advance; stepping ptr
    # below does not modify the caller's start value.
    if start == datetime.date.min:
        ptr = group[0][0]
    else:
        ptr = start

    for d,total in group:
        # Fill in days with no data
        while ptr < arrow.get(datetime.date(d.year, d.month, d.day)):
            x.append(fmt_str.format(ptr.year, ptr.month, ptr.day))
            y.append(default)
            ptr += dt

        x.append(fmt_str.format(d.year, d.month, d.day))
        y.append(process_output(total))

        ptr += dt

    # Fill in the end
    while ptr < end:
        x.append(fmt_str.format(ptr.year, ptr.month, ptr.day))
        y.append(default)
        ptr += dt
    return x,y

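# Illustrative example (not from the original source), assuming daily grouping:
# with group = [(date(2014, 7, 9), 12), (date(2014, 7, 11), 3)],
# start = arrow.get(date(2014, 7, 9)), end = arrow.get(date(2014, 7, 12)) and
# period = 'day', create_x_y_from_group returns
#     x = ['2014-07-09', '2014-07-10', '2014-07-11']
#     y = [12, 0, 3]
# i.e. the gap on 2014-07-10 is filled in with the default value.
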
def datetime_to_timestamps (data, process_output=lambda x: x):
    out = []
    for d in data:
        t = arrow.get(
            datetime.datetime(
                year=d[0].year,
                month=d[0].month,
                day=d[0].day,
                hour=12,
            )
        ).timestamp * 1000
        #t = round(datetime.datetime(year=d[0].year, month=d[0].month, day=d[0].day, hour=12)\
        #    .replace(tzinfo=datetime.timezone.utc).timestamp()*1000)
        # t = round(datetime.datetime.combine(d[0], datetime.datetime.min.time())\
        #     .replace(tzinfo=datetime.timezone.utc).timestamp()*1000)
        out.append((t, process_output(d[1])))
    return out

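# Illustrative example (not from the original source), assuming the arrow
# version this module targets (where .timestamp is a property giving epoch
# seconds): datetime_to_timestamps([(datetime.date(2014, 7, 9), Decimal('4.50'))], float)
# yields [(1404907200000, 4.5)] -- epoch milliseconds at noon UTC on that date,
# paired with the processed value.
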
# Get x,y for some data metric
#
# start: datetime.datetime that all data must be at or after
# end: datetime.datetime that all data must be before
# metric: 'items', 'sales', 'deposits', 'deposits_cash', or 'deposits_btc'
# period: 'day', 'month', or 'year'
def admin_data_period_range(start, end, metric, period):
    if metric == 'items':
        data = PurchaseLineItem.quantity_by_period(period, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period))
    elif metric == 'sales':
        data = PurchaseLineItem.virtual_revenue_by_period(period, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    elif metric == 'deposits':
        data = Deposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    elif metric == 'deposits_cash':
        data = CashDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    elif metric == 'deposits_btc':
        data = BTCDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group(data, start, end, period, float, 0.0))
    else:
        raise InvalidMetric(metric)

def admin_data_period(num_days, metric, period):
    return admin_data_period_range(get_start(num_days), get_end(), metric, period)

def admin_data_highcharts_period(metric, period):
    start = get_start(0)
    end = get_end()
    if metric == 'items':
        data = PurchaseLineItem.quantity_by_period(period, start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data)
    elif metric == 'sales':
        data = PurchaseLineItem.virtual_revenue_by_period(period, start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    elif metric == 'deposits':
        data = Deposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    elif metric == 'deposits_cash':
        data = CashDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    elif metric == 'deposits_btc':
        data = BTCDeposit.deposits_by_period('day', start=ftz(start), end=ftz(end))
        return datetime_to_timestamps(data, float)
    else:
        raise InvalidMetric(metric)

###
### "Each" functions. So "monday", "tuesday", etc. instead of 2014-07-21
###

month_each_mapping = [(i, datetime.date(2000,i,1).strftime('%B')) for i in range(1,13)]

day_each_mapping = [(i, '{:02}'.format(i)) for i in range(0,31)]

weekday_each_mapping = [(6, 'Sunday'), (0, 'Monday'), (1, 'Tuesday'),
                        (2, 'Wednesday'), (3, 'Thursday'), (4, 'Friday'),
                        (5, 'Saturday')]

hour_each_mapping = [(i, '{0:02}:00-{0:02}:59'.format(i)) for i in range(0,24)]

def create_x_y_from_group_each(group, mapping, start, end, process_output=lambda x: x, default=0):
    x = []
    y = []

    for d in mapping:
        # Put the x axis label in the x array
        x.append(d[1])

        if d[0] in group:
            # We have a reading for this particular time unit
            y.append(process_output(group[d[0]]))
        else:
            y.append(default)

    return x,y

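# Illustrative example (not from the original source), using weekday_each_mapping:
# with group = {0: 5, 2: 3} (5 on Mondays, 3 on Wednesdays),
# create_x_y_from_group_each returns
#     x = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
#     y = [0, 5, 0, 3, 0, 0, 0]
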
# Get data about each something. So each weekday, or each hour
#
# metric: 'items', 'sales', or 'deposits'
# each: 'month_each', 'day_each', 'weekday_each', or 'hour_each'
def admin_data_each_range(start, end, metric, each):
    if each == 'month_each':
        mapping = month_each_mapping
    elif each == 'day_each':
        mapping = day_each_mapping
    elif each == 'weekday_each':
        mapping = weekday_each_mapping
    elif each == 'hour_each':
        mapping = hour_each_mapping

    if metric == 'items':
        data = PurchaseLineItem.quantity_by_period(each, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group_each(data, mapping, start, end))
    elif metric == 'sales':
        data = PurchaseLineItem.virtual_revenue_by_period(each, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group_each(data, mapping, start, end, float, 0.0))
    elif metric == 'deposits':
        data = Deposit.deposits_by_period(each, start=ftz(start), end=ftz(end))
        return zip(create_x_y_from_group_each(data, mapping, start, end, float, 0.0))
    else:
        raise InvalidMetric(metric)

def admin_data_each(num_days, metric, each):
    return admin_data_each_range(get_start(num_days), get_end(), metric, each)


def create_json(request, metric, period):
    try:
        if 'days' in request.GET:
            num_days = int(request.GET['days'])
        else:
            num_days = 0
        if 'each' in period:
            x,y = admin_data_each(num_days, metric, period)
        else:
            x,y = admin_data_period(num_days, metric, period)
        return {'x': x,
                'y': y,
                'num_days': num_days or 'all'}
    except ValueError:
        return {'status': 'error'}
    except utility.InvalidGroupPeriod as e:
        return {'status': 'error',
                'message': 'Invalid period for grouping data: {}'.format(e)}
    except InvalidMetric as e:
        return {'status': 'error',
                'message': 'Invalid metric for requesting data: {}'.format(e)}
    except Exception as e:
        if request.debug: raise e
        return {'status': 'error'}

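# Illustrative example (not from the original source): with ?days=30 in the
# query string, create_json(request, 'sales', 'day') returns something like
# {'x': (x_labels,), 'y': (y_values,), 'num_days': 30}; invalid input falls
# through to one of the {'status': 'error', ...} dictionaries rather than
# raising.
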
def create_highcharts_json(request, metric, period):
    try:
        return admin_data_highcharts_period(metric, period)
    except ValueError:
        return {'status': 'error'}
    except utility.InvalidGroupPeriod as e:
        return {'status': 'error',
                'message': 'Invalid period for grouping data: {}'.format(e)}
    except InvalidMetric as e:
        return {'status': 'error',
                'message': 'Invalid metric for requesting data: {}'.format(e)}
    except Exception as e:
        if request.debug: raise e
        return {'status': 'error'}

def create_dict_to_date(metric, period):
    now = datetime.date.today()

    if period == 'month':
        start = arrow.get(datetime.date(now.year, now.month, 1))
    elif period == 'year':
        start = arrow.get(datetime.date(now.year, 1, 1))

    xs,ys = admin_data_period_range(start, get_end(), metric, period)

    return {'xs': xs,
            'ys': ys}


def create_dict(metric, period, num_days):
    if 'each' in period:
        xs,ys = admin_data_each(num_days, metric, period)
    else:
        xs,ys = admin_data_period(num_days, metric, period)

    return {'xs': xs,
            'ys': ys,
            'avg': [sum(y)/len(y) for y in ys],
            'avg_hack': [[sum(y)/len(y)]*len(y) for y in ys],
            'num_days': num_days or 'all'}

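# Note (added commentary): the admin_data_* helpers return
# zip(create_x_y_from_group...(...)), i.e. an iterator over the (x, y) pair, so
# xs and ys above each come out as a one-element tuple wrapping the series.
# That is why 'avg' and 'avg_hack' iterate with "for y in ys" even though there
# is only one series.
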
# Get a list of timestamps and the number of a particular item that was sold
# at that time.
def create_item_sales_json(request, item_id):
    sales = PurchaseLineItem.item_sale_quantities(item_id)

    individual = []
    totals = []
    total = 0
    for s in sales:
        tstamp = s[1].timestamp.timestamp*1000
        individual.append((tstamp, s[0].quantity))
        total += s[0].quantity
        totals.append((tstamp, total))

    return {'individual': individual,
            'sum': totals}

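# Illustrative example (not from the original source): the dictionary returned
# above looks like {'individual': [(t1, 2), (t2, 1)], 'sum': [(t1, 2), (t2, 3)]},
# where each t is an epoch-millisecond timestamp and 'sum' carries the running
# total of units sold.
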
#######
### Calculate the speed of sale for all items

# We are going to do this over all time and over the last 30 days

# Returns {item_id -> sale speed (items sold per day on sale)}, or a single
# sale speed if only_item_id is given
def item_sale_speed(num_days, only_item_id=None):
    # TODO: If we're only looking for one item (only_item_id), this can probably
    # be made more efficient

    # First we need to figure out when each item was in stock and when it wasn't.
    # The easiest way is to look at the in_stock column in the items_history
    # table and figure it out from there.

    # Start by getting all item change events, padded back to 3x the requested
    # period so we can tell whether each item was already in stock at the start.
    data = {}

    data_onsale = {}

    start = get_start(num_days)
    start_datetime = arrow.get(datetime.datetime(start.year, start.month, start.day))

    start_padding = get_start(num_days*3)
    start_str = start_padding.strftime('%Y-%m-%d 0:0')
    # This gets a little hairy b/c we circumvent sqlalchemy here. This means
    # that timestamps aren't automatically converted into arrow objects, so we
    # have to do it ourselves everywhere we access them
    items = DBSession.execute("SELECT * FROM items_history\
                               WHERE item_changed_at>'{}'\
                               ORDER BY item_changed_at ASC".format(start_str))

    # Calculate the number of days in the interval the item was in stock
    for item in items:
        status = item.in_stock>0

        item_changed_at = arrow.get(item.item_changed_at)

        if item.id not in data_onsale:
            data_onsale[item.id] = {'days_on_sale': 0,
                                    'date_in_stock': None,
                                    'num_sold': 0}

        if item_changed_at < start_datetime:
            # We need to figure out if the item started in stock at the
            # beginning of the time period.
            if status == True:
                data_onsale[item.id]['date_in_stock'] = start_datetime
            else:
                data_onsale[item.id]['date_in_stock'] = None

        elif (status == True) and (data_onsale[item.id]['date_in_stock'] == None):
            # Item is in stock now and wasn't before
            data_onsale[item.id]['date_in_stock'] = item_changed_at

        elif (status == False) and (data_onsale[item.id]['date_in_stock'] != None):
            # Item is now out of stock

            # Calculate how long it was on sale
            tdelta = item_changed_at - data_onsale[item.id]['date_in_stock']
            data_onsale[item.id]['days_on_sale'] += tdelta.days
            #print('{}: {}'.format(item.id, tdelta))

            data_onsale[item.id]['date_in_stock'] = None

    # Items that are still in stock get credit up through today
    for item_id,item_data in data_onsale.items():
        if item_data['date_in_stock'] != None:
            tdelta = arrow.now() - item_data['date_in_stock']
            item_data['days_on_sale'] += tdelta.days
            #print('{}: {}'.format(item_id, tdelta.days))


    # Calculate the number of items sold during the period
    purchases = DBSession.query(PurchaseLineItem)\
                         .join(Transaction)\
                         .join(Event)\
                         .filter(Event.deleted==False)\
                         .filter(Event.timestamp>start)
    for purchase in purchases:
        item_id = purchase.item_id
        quantity = purchase.quantity

        # This check shouldn't be necessary, but be defensive in case a
        # purchased item never showed up in items_history above.
        if item_id not in data_onsale:
            data_onsale[item_id] = {'days_on_sale': 0,
                                    'date_in_stock': None,
                                    'num_sold': 0}

        data_onsale[item_id]['num_sold'] += quantity


    # Calculate rate, finally
    for itemid,item_data in data_onsale.items():
        if item_data['days_on_sale'] == 0:
            data[itemid] = 0
            continue
        data[itemid] = item_data['num_sold'] / item_data['days_on_sale']

    if only_item_id:
        if only_item_id in data:
            return data[only_item_id]
        else:
            return 0
    else:
        return data

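# Illustrative example (not from the original source): if an item was in stock
# for 10 of the last 30 days and 20 units of it were sold in that window,
# item_sale_speed(30) reports 20 / 10 = 2.0 items per day for that item's id.
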
#######
### Calculate a histogram of user balances
#
# This has a special feature where it counts 0.00 as its own special bin
def user_balance_histogram ():
    bin_size = 5 # $5
    bins = {}

    def to_bin (x):
        if x == Decimal(0):
            return 0
        start = int(bin_size * round(float(x)/bin_size))
        if start == 0:
            start = 0.01
        return start

    users = User.get_normal_users()
    for user in users:
        balance_bin = to_bin(user.balance)
        if balance_bin not in bins:
            bins[balance_bin] = 1
        else:
            bins[balance_bin] += 1

    out = {}

    out['raw'] = bins

    last = None
    x = []
    y = []
    for bin_start, count in sorted(bins.items()):
        zero = False

        # Handle near 0 special
        if bin_start == 0:
            zero = True

        if bin_start == 0.01:
            bin_start = 0

        # Fill in missing bins, if needed
        if last != None and bin_start-last > bin_size:
            for i in range(last+bin_size, bin_start, bin_size):
                b = '{} to {}'.format(i, i+bin_size)
                x.append(b)
                y.append(0)

        if zero:
            b = '0'
        else:
            b = '{} to {}'.format(bin_start, bin_start+bin_size)
        x.append(b)
        y.append(count)

        last = bin_start

    out['x'] = x
    out['y'] = y

    return out

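# Illustrative example (not from the original source) of the binning above:
# to_bin(Decimal('0.00')) -> 0 (the special "exactly zero" bin, labeled '0'),
# to_bin(Decimal('1.75')) -> 0.01 (near zero but not zero, labeled '0 to 5'),
# to_bin(Decimal('3.20')) -> 5 (labeled '5 to 10'), and
# to_bin(Decimal('-6.00')) -> -5 (labeled '-5 to 0').
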
#######
### Calculate a histogram of user days since last purchase
#
# Users who have never made a purchase (days_since_last_purchase is None) are
# left out of the histogram.
def user_dayssincepurchase_histogram ():
    bin_size = 10 # days
    bins = {}

    def to_bin (x):
        if x == None:
            return None
        return int(bin_size * round(float(x)/bin_size))

    users = User.get_normal_users()
    for user in users:
        the_bin = to_bin(user.days_since_last_purchase)
        if the_bin != None:
            if the_bin not in bins:
                bins[the_bin] = 1
            else:
                bins[the_bin] += 1

    out = {}

    out['raw'] = bins

    last = None
    x = []
    y = []
    for bin_start, count in sorted(bins.items()):
        # Fill in missing bins, if needed
        if last != None and bin_start-last > bin_size:
            for i in range(last+bin_size, bin_start, bin_size):
                b = '{} to {}'.format(i, i+bin_size)
                x.append(b)
                y.append(0)

        b = '{} to {}'.format(bin_start, bin_start+bin_size)
        x.append(b)
        y.append(count)

        last = bin_start

    out['x'] = x
    out['y'] = y

    return out


#######
### Calculate a histogram of number of purchases by each user
#
# Purchase counts above 200 are capped at 200 so the long tail collapses into one bin
def user_numberofpurchases_histogram ():
    bins = {}

    users = User.get_normal_users()
    for user in users:
        number_purchases = user.number_of_purchases
        if number_purchases > 200:
            number_purchases = 200

        if number_purchases not in bins:
            bins[number_purchases] = 1
        else:
            bins[number_purchases] += 1

    out = {}

    last = None
    x = []
    y = []
    for bin_start, count in sorted(bins.items()):
        # Fill in missing bins, if needed (start at last+1 so the previous bin's
        # label isn't duplicated)
        if last != None and bin_start-last > 1:
            for i in range(last+1, bin_start):
                b = '{}'.format(i)
                x.append(b)
                y.append(0)

        b = '{}'.format(bin_start)
        x.append(b)
        y.append(count)

        last = bin_start

    out['x'] = x
    out['y'] = y

    return out


@view_config(route_name='admin_data_items_json',
             renderer='json',
             permission='manage')
def admin_data_items_json(request):
    return create_json(request, 'items', request.matchdict['period'])


@view_config(route_name='admin_data_sales_json',
             renderer='json',
             permission='manage')
def admin_data_sales_json(request):
    return create_json(request, 'sales', request.matchdict['period'])


@view_config(route_name='admin_data_json_highcharts',
             renderer='json',
             permission='manage')
def admin_data_json_highcharts(request):
    return create_highcharts_json(request, request.matchdict['metric'], request.matchdict['period'])


@view_config(route_name='admin_data_deposits_json',
             renderer='json',
             permission='manage')
def admin_data_deposits_json(request):
    return create_json(request, 'deposits', request.matchdict['period'])


@view_config(route_name='admin_data_items_each_json',
             renderer='json',
             permission='manage')
def admin_data_items_each_json(request):
    return create_json(request, 'items', request.matchdict['period']+'_each')


@view_config(route_name='admin_data_sales_each_json',
             renderer='json',
             permission='manage')
def admin_data_sales_each_json(request):
    return create_json(request, 'sales', request.matchdict['period']+'_each')


@view_config(route_name='admin_data_deposits_each_json',
             renderer='json',
             permission='manage')
def admin_data_deposits_each_json(request):
    return create_json(request, 'deposits', request.matchdict['period']+'_each')


# All of the sale dates and quantities of a particular item
@view_config(route_name='admin_data_item_sales_json',
             renderer='json',
             permission='manage')
def admin_data_item_sales_json(request):
    return create_item_sales_json(request, request.matchdict['item_id'])


# Timestamps and the number of total users
@view_config(route_name='admin_data_users_totals_json',
             renderer='json',
             permission='manage')
def admin_data_users_totals_json(request):
    return User.get_user_count_cumulative()


# Timestamps and user debt, bank balance, debt/# users in debt
@view_config(route_name='admin_data_users_balance_totals_json',
             renderer='json',
             permission='manage')
def admin_data_users_balance_totals_json(request):
    return Transaction.get_balance_total_daily()


# Timestamps and balance for a specific user over time
@view_config(route_name='admin_data_user_balance_json',
             renderer='json',
             permission='manage')
def admin_data_user_balance_json(request):
    user = User.from_id(request.matchdict['user_id'])
    return Transaction.get_balances_over_time_for_user(user)


# # Timestamps and user debt, "bank balance", debt/user
# @view_config(route_name='admin_data_users_balance_totals_percapita_json',
#              renderer='json',
#              permission='manage')
# def admin_data_users_balance_totals_percapita_json(request):
#     debt = Transaction.get_balance_total_daily()
#     users = User.get_user_count_cumulative()

#     di = 0
#     ui = 0
#     next_user_time = users[ui][0]
#     user_count = users[ui][1]
#     out = []

#     for rec in debt:
#         timestamp = rec[0]
#         debt = rec[1]
#         balance = rec[2]

#         # Look for the correct number of users
#         while timestamp > next_user_time:
#             ui += 1
#             if ui >= len(users):
#                 break
#             next_user_time = users[ui][0]
#             user_count = users[ui][1]

#         debt_per_capita = debt/user_count

#         out.append((timestamp, debt, balance, debt_per_capita))

#     return out


@view_config(route_name='admin_data_speed_items',
             renderer='json',
             permission='manage')
def admin_data_speed_items(request):
    return item_sale_speed(30)


@view_config(route_name='admin_data_histogram_balances',
             renderer='json',
             permission='manage')
def admin_data_histogram_balances(request):
    return user_balance_histogram()


@view_config(route_name='admin_data_histogram_dayssincepurchase',
             renderer='json',
             permission='manage')
def admin_data_histogram_dayssincepurchase(request):
    return user_dayssincepurchase_histogram()


@view_config(route_name='admin_data_histogram_numberofpurchases',
             renderer='json',
             permission='manage')
def admin_data_histogram_numberofpurchases(request):
    return user_numberofpurchases_histogram()