# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE
#
# Copyright 2018 by its authors.
# Some rights reserved. See LICENSE.rst, CONTRIBUTORS.rst.

import collections
import datetime
import json
from calendar import monthrange
from operator import itemgetter
from time import time

from DateTime import DateTime
from Products.Archetypes.public import DisplayList
from Products.CMFCore.utils import getToolByName
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.api import get_tool
from bika.lims.api import search
from bika.lims.browser import BrowserView
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
from bika.lims.catalog import CATALOG_ANALYSIS_REQUEST_LISTING
from bika.lims.catalog import CATALOG_WORKSHEET_LISTING
from bika.lims.utils import get_strings
from bika.lims.utils import get_unicode
from plone import api
from plone import protect
from plone.api.exc import InvalidParameterError
from plone.memoize import ram
from plone.memoize import view as viewcache

DASHBOARD_FILTER_COOKIE = 'dashboard_filter_cookie'

# Supported periodicities for evolution charts
PERIODICITY_DAILY = "d"
PERIODICITY_WEEKLY = "w"
PERIODICITY_MONTHLY = "m"
PERIODICITY_QUARTERLY = "q"
PERIODICITY_BIANNUAL = "b"
PERIODICITY_YEARLY = "y"
PERIODICITY_ALL = "a"

def get_dashboard_registry_record():
    """
    Return the 'bika.lims.dashboard_panels_visibility' values.
    :return: A dictionary (empty if the record is not available)
    """
    try:
        registry = api.portal.get_registry_record(
            'bika.lims.dashboard_panels_visibility')
        return registry
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")
    return dict()


def set_dashboard_registry_record(registry_info):
    """
    Sets the 'bika.lims.dashboard_panels_visibility' values.

    :param registry_info: A dictionary type object with all its values as
    *unicode* objects.
    """
    try:
        api.portal.set_registry_record(
            'bika.lims.dashboard_panels_visibility', registry_info)
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")


def setup_dashboard_panels_visibility_registry(section_name):
    """
    Initializes the values for panels visibility in registry_records. By
    default, only users with LabManager or Manager roles can see the panels.
    :param section_name: the panels section id
    :return: the updated registry record (a dictionary)
    """
    registry_info = get_dashboard_registry_record()
    role_permissions_list = []
    # Getting roles defined in the system
    roles = []
    acl_users = get_tool("acl_users")
    roles_tree = acl_users.portal_role_manager.listRoleIds()
    for role in roles_tree:
        roles.append(role)
    # Set view permissions to each role as 'yes':
    # "role1,yes,role2,no,role3,no"
    for role in roles:
        role_permissions_list.append(role)
        visible = 'no'
        if role in ['LabManager', 'Manager']:
            visible = 'yes'
        role_permissions_list.append(visible)
    role_permissions = ','.join(role_permissions_list)

    # Set permissions string into dict
    registry_info[get_unicode(section_name)] = get_unicode(role_permissions)

    # Set new values to registry record
    set_dashboard_registry_record(registry_info)
    return registry_info
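
# Illustrative sketch of the values built above; the roles shown
# ('LabManager', 'Analyst') are an assumption for the example:
#
#   >>> role_permissions_list = ['LabManager', 'yes', 'Analyst', 'no']
#   >>> ','.join(role_permissions_list)
#   'LabManager,yes,Analyst,no'
#
# The registry record then maps each section id to such a string, e.g.
# {u'analyses': u'LabManager,yes,Analyst,no'}.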


def get_dashboard_panels_visibility_by_section(section_name):
    """
    Returns a list of pairs that represent the role-permission view relation
    for the panel section passed in.
    :param section_name: the panels section id.
    :return: a list of tuples.
    """
    registry_info = get_dashboard_registry_record()
    if section_name not in registry_info:
        # Registry hasn't been set, do it at least for this section
        registry_info = \
            setup_dashboard_panels_visibility_registry(section_name)

    pairs = registry_info.get(section_name)
    pairs = get_strings(pairs)
    if pairs is None:
        # In the registry, but with None value?
        setup_dashboard_panels_visibility_registry(section_name)
        return get_dashboard_panels_visibility_by_section(section_name)

    pairs = pairs.split(',')
    if len(pairs) == 0 or len(pairs) % 2 != 0:
        # Non-valid or malformed value
        setup_dashboard_panels_visibility_registry(section_name)
        return get_dashboard_panels_visibility_by_section(section_name)

    result = [
        (pairs[i], pairs[i + 1]) for i in range(len(pairs)) if i % 2 == 0]
    return result
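
# Illustrative sketch of the pairing performed above; the roles shown are
# an assumption for the example:
#
#   >>> pairs = "LabManager,yes,Analyst,no".split(',')
#   >>> [(pairs[i], pairs[i + 1]) for i in range(len(pairs)) if i % 2 == 0]
#   [('LabManager', 'yes'), ('Analyst', 'no')]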


def is_panel_visible_for_user(panel, user):
    """
    Checks if the user is allowed to see the panel
    :param panel: panel ID as string
    :param user: a MemberData object
    :return: Boolean
    """
    roles = user.getRoles()
    visibility = get_dashboard_panels_visibility_by_section(panel)
    for pair in visibility:
        if pair[0] in roles and pair[1] == 'yes':
            return True
    return False
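
# Illustrative usage; the panel id and the outcome shown are assumptions,
# since the result depends on the current user's roles and on the
# 'bika.lims.dashboard_panels_visibility' registry record:
#
#   >>> member = api.user.get_current()
#   >>> is_panel_visible_for_user('analyses', member)
#   True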


class DashboardView(BrowserView):
    template = ViewPageTemplateFile("templates/dashboard.pt")

    def __init__(self, context, request):
        BrowserView.__init__(self, context, request)
        self.dashboard_cookie = None
        self.member = None

    def __call__(self):
        frontpage_url = self.portal_url + "/senaite-frontpage"
        if not self.context.bika_setup.getDashboardByDefault():
            # Do not render dashboard, render frontpage instead
            self.request.response.redirect(frontpage_url)
            return

        mtool = getToolByName(self.context, 'portal_membership')
        if mtool.isAnonymousUser():
            # Anonymous user, redirect to frontpage
            self.request.response.redirect(frontpage_url)
            return

        self.member = mtool.getAuthenticatedMember()
        self.periodicity = self.request.get('p', PERIODICITY_WEEKLY)
        self.dashboard_cookie = self.check_dashboard_cookie()
        date_range = self.get_date_range(self.periodicity)
        self.date_from = date_range[0]
        self.date_to = date_range[1]

        return self.template()

    def check_dashboard_cookie(self):
        """
        Gets the dashboard filter cookie from the request.

        If the cookie does not exist yet, it is created with all sections
        set to their default filter ('all') and that default dictionary is
        returned. Otherwise, the cookie value is parsed and returned.

        :return: a dictionary of strings
        """
        # Getting cookie
        cookie_raw = self.request.get(DASHBOARD_FILTER_COOKIE, None)
        # If it doesn't exist, create it with default values
        if cookie_raw is None:
            cookie_raw = self._create_raw_data()
            self.request.response.setCookie(
                DASHBOARD_FILTER_COOKIE,
                json.dumps(cookie_raw),
                quoted=False,
                path='/')
            return cookie_raw
        return get_strings(json.loads(cookie_raw))
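
    # Illustrative example: with the three standard sections visible, the
    # default cookie payload created above is the JSON-encoded form of
    #
    #   {'analyses': 'all', 'analysisrequests': 'all', 'worksheets': 'all'}
    #
    # i.e. every section starts with the 'all' filter selected.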

    def is_filter_selected(self, selection_id, value):
        """
        Compares whether the 'selection_id' parameter value saved in the
        cookie is the same value as the "value" parameter.

        :param selection_id: a string as a dashboard_cookie key.
        :param value: The value to compare against the value from
        dashboard_cookie key.
        :return: Boolean.
        """
        selected = self.dashboard_cookie.get(selection_id)
        return selected == value

    def is_admin_user(self):
        """
        Checks if the current user has the LabManager or Manager role.
        :return: Boolean
        """
        user = api.user.get_current()
        roles = user.getRoles()
        return "LabManager" in roles or "Manager" in roles

    def _create_raw_data(self):
        """
        Gathers the different section ids and creates the default cookie
        data, with all sections filtered by 'all'.

        :return: A dictionary like:
            {'analyses':'all','analysisrequests':'all','worksheets':'all'}
        """
        result = {}
        for section in self.get_sections():
            result[section.get('id')] = 'all'
        return result

    def get_date_range(self, periodicity=PERIODICITY_WEEKLY):
        """Returns a date range (date from, date to) that suits the passed-in
        periodicity.

        :param periodicity: string that represents the periodicity
        :type periodicity: str
        :return: A date range
        :rtype: (DateTime, DateTime)
        """
        today = datetime.date.today()
        if periodicity == PERIODICITY_DAILY:
            # Daily, load last 30 days
            date_from = DateTime() - 30
            date_to = DateTime() + 1
            return date_from, date_to

        if periodicity == PERIODICITY_MONTHLY:
            # Monthly, load last 2 years
            min_year = today.year - 1 if today.month == 12 else today.year - 2
            min_month = 1 if today.month == 12 else today.month
            date_from = DateTime(min_year, min_month, 1)
            date_to = DateTime(today.year, today.month,
                               monthrange(today.year, today.month)[1],
                               23, 59, 59)
            return date_from, date_to

        if periodicity == PERIODICITY_QUARTERLY:
            # Quarterly, load last 4 years
            m = (((today.month - 1) / 3) * 3) + 1
            min_year = today.year - 4 if today.month == 12 else today.year - 5
            date_from = DateTime(min_year, m, 1)
            date_to = DateTime(today.year, m + 2,
                               monthrange(today.year, m + 2)[1], 23, 59,
                               59)
            return date_from, date_to

        if periodicity == PERIODICITY_BIANNUAL:
            # Biannual, load last 10 years
            m = (((today.month - 1) / 6) * 6) + 1
            min_year = today.year - 10 if today.month == 12 else today.year - 11
            date_from = DateTime(min_year, m, 1)
            date_to = DateTime(today.year, m + 5,
                               monthrange(today.year, m + 5)[1], 23, 59,
                               59)
            return date_from, date_to

        if periodicity in [PERIODICITY_YEARLY, PERIODICITY_ALL]:
            # Yearly or All time, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            date_from = DateTime(min_year, 1, 1)
            date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            return date_from, date_to

        # Default Weekly, load last six months
        year, weeknum, dow = today.isocalendar()
        min_year = today.year if today.month > 6 else today.year - 1
        min_month = today.month - 6 if today.month > 6 \
            else (today.month - 6) + 12
        date_from = DateTime(min_year, min_month, 1)
        date_to = DateTime() - dow + 7
        return date_from, date_to
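
    # Illustrative example for the default weekly periodicity; the "today"
    # of 2018-05-16 is an assumption: min_year is 2017 and min_month is 11,
    # so the range starts at DateTime('2017/11/01') and ends at
    # DateTime() - dow + 7, the Sunday that closes the current ISO week.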

    def get_sections(self):
        """ Returns an array with the sections to be displayed.
            Every section is a dictionary with the following structure:
                {'id': <section_identifier>,
                 'title': <section_title>,
                 'panels': <array of panels>}
        """
        sections = []
        user = api.user.get_current()
        if is_panel_visible_for_user('analyses', user):
            sections.append(self.get_analyses_section())
        if is_panel_visible_for_user('analysisrequests', user):
            sections.append(self.get_analysisrequests_section())
        if is_panel_visible_for_user('worksheets', user):
            sections.append(self.get_worksheets_section())
        return sections

    def get_filter_options(self):
        """
        Returns the dashboard filter options.
        :return: DisplayList with the available filter options
        """
        dash_opt = DisplayList((
            ('all', _('All')),
            ('mine', _('Mine')),
        ))
        return dash_opt

    def _getStatistics(self, name, description, url, catalog, criterias, total):
        out = {'type': 'simple-panel',
               'name': name,
               'class': 'informative',
               'description': description,
               'total': total,
               'link': self.portal_url + '/' + url}

        results = 0
        ratio = 0
        if total > 0:
            results = self.search_count(criterias, catalog.id)
            results = results if total >= results else total
            ratio = (float(results)/float(total))*100 if results > 0 else 0
        ratio = str("%%.%sf" % 1) % ratio
        out['legend'] = _('of') + " " + str(total) + ' (' + ratio + '%)'
        out['number'] = results
        out['percentage'] = float(ratio)
        return out
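
    # Illustrative example of the panel dictionary returned above; the
    # counts, name and URL are assumptions:
    #
    #   {'type': 'simple-panel', 'name': u'To be verified',
    #    'class': 'informative', 'description': u'To be verified',
    #    'total': 100, 'number': 25, 'percentage': 25.0,
    #    'legend': u'of 100 (25.0%)',
    #    'link': 'https://lims.example.com/worksheets?list_review_state=to_be_verified'}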

    def get_analysisrequests_section(self):
        """ Returns the section dictionary related with Analysis
            Requests, that contains some informative panels (like
            ARs to be verified, ARs to be published, etc.)
        """
        out = []
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_REQUEST_LISTING)
        query = {'portal_type': "AnalysisRequest",
                 'cancellation_state': ['active']}

        # Check if dashboard_cookie contains any values to query
        # elements by
        query = self._update_criteria_with_filters(query, 'analysisrequests')

        # Active Analysis Requests (All)
        total = self.search_count(query, catalog.id)

        # Sampling workflow enabled?
        if self.context.bika_setup.getSamplingWorkflowEnabled():
            # Analysis Requests waiting to be sampled or scheduled
            name = _('Analysis Requests to be sampled')
            desc = _("To be sampled")
            purl = 'samples?samples_review_state=to_be_sampled'
            query['review_state'] = ['to_be_sampled', ]
            query['cancellation_state'] = ['active', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests waiting to be preserved
        name = _('Analysis Requests to be preserved')
        desc = _("To be preserved")
        purl = 'samples?samples_review_state=to_be_preserved'
        query['review_state'] = ['to_be_preserved', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests scheduled for Sampling
        name = _('Analysis Requests scheduled for sampling')
        desc = _("Sampling scheduled")
        purl = 'samples?samples_review_state=scheduled_sampling'
        query['review_state'] = ['scheduled_sampling', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests awaiting reception
        name = _('Analysis Requests to be received')
        desc = _("Reception pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_due'
        query['review_state'] = ['sample_due', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests under way
        name = _('Analysis Requests with results pending')
        desc = _("Results pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_received'
        query['review_state'] = ['attachment_due',
                                 'sample_received', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests to be verified
        name = _('Analysis Requests to be verified')
        desc = _("To be verified")
        purl = 'analysisrequests?analysisrequests_review_state=to_be_verified'
        query['review_state'] = ['to_be_verified', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests verified (to be published)
        name = _('Analysis Requests verified')
        desc = _("Verified")
        purl = 'analysisrequests?analysisrequests_review_state=verified'
        query['review_state'] = ['verified', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests published
        name = _('Analysis Requests published')
        desc = _("Published")
        purl = 'analysisrequests?analysisrequests_review_state=published'
        query['review_state'] = ['published', ]
        query['cancellation_state'] = ['active', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests to be printed
        if self.context.bika_setup.getPrintingWorkflowEnabled():
            name = _('Analysis Requests to be printed')
            desc = _("To be printed")
            purl = 'analysisrequests?analysisrequests_getPrinted=0'
            query['getPrinted'] = '0'
            query['review_state'] = ['published', ]
            query['cancellation_state'] = ['active', ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

        # Chart with the evolution of ARs over a period, grouped by
        # periodicity
        outevo = self.fill_dates_evo(catalog, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Analysis Requests'),
                    'class': 'informative',
                    'description': _('Evolution of Analysis Requests'),
                    'data': json.dumps(outevo),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'analysisrequests',
                'title': _('Analysis Requests'),
                'panels': out}

    def get_worksheets_section(self):
        """ Returns the section dictionary related with Worksheets,
            that contains some informative panels (like
            WS to be verified, WS with results pending, etc.)
        """
        out = []
        bc = getToolByName(self.context, CATALOG_WORKSHEET_LISTING)
        query = {'portal_type': "Worksheet", }

        # Check if dashboard_cookie contains any values to query
        # elements by
        query = self._update_criteria_with_filters(query, 'worksheets')

        # Active Worksheets (all)
        total = self.search_count(query, bc.id)

        # Open worksheets
        name = _('Results pending')
        desc = _('Results pending')
        purl = 'worksheets?list_review_state=open'
        query['review_state'] = ['open', 'attachment_due']
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Worksheets to be verified
        name = _('To be verified')
        desc = _('To be verified')
        purl = 'worksheets?list_review_state=to_be_verified'
        query['review_state'] = ['to_be_verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Worksheets verified
        name = _('Verified')
        desc = _('Verified')
        purl = 'worksheets?list_review_state=verified'
        query['review_state'] = ['verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Chart with the evolution of WSs over a period, grouped by
        # periodicity
        outevo = self.fill_dates_evo(bc, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Worksheets'),
                    'class': 'informative',
                    'description': _('Evolution of Worksheets'),
                    'data': json.dumps(outevo),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'worksheets',
                'title': _('Worksheets'),
                'panels': out}

    def get_analyses_section(self):
        """ Returns the section dictionary related with Analyses,
            that contains some informative panels (analyses pending,
            analyses assigned, etc.)
        """
        out = []
        bc = getToolByName(self.context, CATALOG_ANALYSIS_LISTING)
        query = {'portal_type': "Analysis",
                 'cancellation_state': 'active'}

        # Check if dashboard_cookie contains any values to query elements by
        query = self._update_criteria_with_filters(query, 'analyses')

        # Active Analyses (All)
        total = self.search_count(query, bc.id)

        # Analyses to be assigned
        name = _('Assignment pending')
        desc = _('Assignment pending')
        purl = '#'
        query['review_state'] = ['unassigned']
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Analyses pending
        name = _('Results pending')
        desc = _('Results pending')
        purl = '#'
        query['review_state'] = ['unassigned', 'assigned', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Analyses to be verified
        name = _('To be verified')
        desc = _('To be verified')
        purl = '#'
        query['review_state'] = ['to_be_verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Analyses verified
        name = _('Verified')
        desc = _('Verified')
        purl = '#'
        query['review_state'] = ['verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Chart with the evolution of Analyses over a period, grouped by
        # periodicity
        outevo = self.fill_dates_evo(bc, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Analyses'),
                    'class': 'informative',
                    'description': _('Evolution of Analyses'),
                    'data': json.dumps(outevo),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'analyses',
                'title': _('Analyses'),
                'panels': out}

    def get_states_map(self, portal_type):
        if portal_type == 'Analysis':
            return {'unassigned': _('Assignment pending'),
                    'assigned': _('Results pending'),
                    'to_be_verified': _('To be verified'),
                    'rejected': _('Rejected'),
                    'retracted': _('Retracted'),
                    'verified': _('Verified'),
                    'published': _('Published')}
        elif portal_type == 'AnalysisRequest':
            return {'to_be_sampled': _('To be sampled'),
                    'to_be_preserved': _('To be preserved'),
                    'scheduled_sampling': _('Sampling scheduled'),
                    'sample_due': _('Reception pending'),
                    'rejected': _('Rejected'),
                    'invalid': _('Invalid'),
                    'sample_received': _('Results pending'),
                    'assigned': _('Results pending'),
                    'attachment_due': _('Results pending'),
                    'to_be_verified': _('To be verified'),
                    'verified': _('Verified'),
                    'published': _('Published')}
        elif portal_type == 'Worksheet':
            return {'open': _('Results pending'),
                    'attachment_due': _('Results pending'),
                    'to_be_verified': _('To be verified'),
                    'verified': _('Verified')}
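
    # For portal types other than the three handled above this method
    # implicitly returns None; its caller in this module, _fill_dates_evo,
    # always passes one of 'Analysis', 'AnalysisRequest' or 'Worksheet'.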

    def get_colors_palette(self):
        return {
            'to_be_sampled': '#917A4C',
            _('To be sampled'): '#917A4C',

            'to_be_preserved': '#C2803E',
            _('To be preserved'): '#C2803E',

            'scheduled_sampling': '#F38630',
            _('Sampling scheduled'): '#F38630',

            'sample_due': '#FA6900',
            _('Reception pending'): '#FA6900',

            'sample_received': '#E0E4CC',
            _('Assignment pending'): '#E0E4CC',
            _('Sample received'): '#E0E4CC',

            'assigned': '#dcdcdc',
            'attachment_due': '#dcdcdc',
            'open': '#dcdcdc',
            _('Results pending'): '#dcdcdc',

            'rejected': '#FF6B6B',
            'retracted': '#FF6B6B',
            _('Rejected'): '#FF6B6B',
            _('Retracted'): '#FF6B6B',

            'invalid': '#C44D58',
            _('Invalid'): '#C44D58',

            'to_be_verified': '#A7DBD8',
            _('To be verified'): '#A7DBD8',

            'verified': '#69D2E7',
            _('Verified'): '#69D2E7',

            'published': '#83AF9B',
            _('Published'): '#83AF9B',
        }

    def _getDateStr(self, period, created):
        if period == PERIODICITY_YEARLY:
            created = created.year()
        elif period == PERIODICITY_BIANNUAL:
            m = (((created.month()-1)/6)*6)+1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == PERIODICITY_QUARTERLY:
            m = (((created.month()-1)/3)*3)+1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == PERIODICITY_MONTHLY:
            created = '%s-%s' % (str(created.year())[2:],
                                 str(created.month()).zfill(2))
        elif period == PERIODICITY_WEEKLY:
            d = (((created.day()-1)/7)*7)+1
            year, weeknum, dow = created.asdatetime().isocalendar()
            created = created - dow
            created = '%s-%s-%s' % (str(created.year())[2:],
                                    str(created.month()).zfill(2),
                                    str(created.day()).zfill(2))
        elif period == PERIODICITY_ALL:
            # All time, but evolution chart grouped by year
            created = created.year()
        else:
            created = '%s-%s-%s' % (str(created.year())[2:],
                                    str(created.month()).zfill(2),
                                    str(created.day()).zfill(2))
        return created
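
    # Illustrative example of the labels built above for an item created on
    # 2018-05-16 (the date is an assumption):
    #
    #   yearly/all -> 2018         quarterly -> '18-04'
    #   biannual   -> '18-01'      monthly   -> '18-05'
    #   weekly     -> '18-05-13'   daily     -> '18-05-16'
    #
    # The weekly label is the same for every item of a given ISO week (the
    # day right before that week's Monday).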

    def fill_dates_evo(self, catalog, query):
        sorted_query = collections.OrderedDict(sorted(query.items()))
        query_json = json.dumps(sorted_query)
        return self._fill_dates_evo(query_json, catalog.id, self.periodicity)

    def _fill_dates_evo_cachekey(method, self, query_json, catalog_name,
                                 periodicity):
        hour = time() // (60 * 60 * 2)
        return hour, catalog_name, query_json, periodicity
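
    # The cache key above includes time() truncated to 2-hour buckets, so
    # _fill_dates_evo below is recomputed at most once per 2-hour window for
    # each distinct catalog/query/periodicity combination.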

    @ram.cache(_fill_dates_evo_cachekey)
    def _fill_dates_evo(self, query_json, catalog_name, periodicity):
        """Returns an array of dictionaries, where each dictionary contains
        the number of items created at a given date, grouped by review_state
        and based on the passed-in periodicity.

        This is an expensive function that will not be called more than once
        every 2 hours (note the cache decorator with `time() // (60 * 60 * 2)`)
        """
        outevoidx = {}
        outevo = []
        days = 1
        if periodicity == PERIODICITY_YEARLY:
            days = 336
        elif periodicity == PERIODICITY_BIANNUAL:
            days = 168
        elif periodicity == PERIODICITY_QUARTERLY:
            days = 84
        elif periodicity == PERIODICITY_MONTHLY:
            days = 28
        elif periodicity == PERIODICITY_WEEKLY:
            days = 7
        elif periodicity == PERIODICITY_ALL:
            days = 336

        # Get the date range
        date_from, date_to = self.get_date_range(periodicity)
        query = json.loads(query_json)
        if 'review_state' in query:
            del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = {'query': (date_from, date_to),
                            'range': 'min:max'}

        otherstate = _('Other status')
        statesmap = self.get_states_map(query['portal_type'])
        stats = statesmap.values()
        stats.sort()
        stats.append(otherstate)
        statscount = {s: 0 for s in stats}
        # Add all periods first, because we want all segments to be displayed
        curr = date_from.asdatetime()
        end = date_to.asdatetime()
        while curr < end:
            currstr = self._getDateStr(periodicity, DateTime(curr))
            if currstr not in outevoidx:
                outdict = {'date': currstr}
                for k in stats:
                    outdict[k] = 0
                outevo.append(outdict)
                outevoidx[currstr] = len(outevo)-1
            curr = curr + datetime.timedelta(days=days)

        brains = search(query, catalog_name)
        for brain in brains:
            created = brain.created
            state = brain.review_state
            if state not in statesmap:
                logger.warn("'%s' State for '%s' not available" %
                            (state, query['portal_type']))
            state = statesmap[state] if state in statesmap else otherstate
            created = self._getDateStr(periodicity, created)
            statscount[state] += 1
            if created in outevoidx:
                oidx = outevoidx[created]
                if state in outevo[oidx]:
                    outevo[oidx][state] += 1
                else:
                    outevo[oidx][state] = 1
            else:
                # Create new row
                currow = {'date': created,
                          state: 1}
                outevo.append(currow)

        # Remove all those states for which there is no data
        rstates = [k for k, v in statscount.items() if v == 0]
        for o in outevo:
            for r in rstates:
                if r in o:
                    del o[r]

        # Sort the available states by number of occurrences, descending
        sorted_states = sorted(statscount.items(), key=itemgetter(1))
        sorted_states = map(lambda item: item[0], sorted_states)
        sorted_states.reverse()
        return {'data': outevo, 'states': sorted_states}
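
    # Illustrative example of the structure returned above; the dates and
    # counts are assumptions for a monthly periodicity:
    #
    #   {'data': [{'date': '18-03', u'Results pending': 12, u'Verified': 30},
    #             {'date': '18-04', u'Results pending': 25, u'Verified': 11}],
    #    'states': [u'Verified', u'Results pending', ...]}
    #
    # 'states' holds the state labels ordered by total number of
    # occurrences, most frequent first.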

    def search_count(self, query, catalog_name):
        sorted_query = collections.OrderedDict(sorted(query.items()))
        query_json = json.dumps(sorted_query)
        return self._search_count(query_json, catalog_name)

    @viewcache.memoize
    def _search_count(self, query_json, catalog_name):
        query = json.loads(query_json)
        brains = search(query, catalog_name)
        return len(brains)
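
    # The query is serialized through an OrderedDict so that logically equal
    # queries always yield the same JSON string; this keeps the
    # @viewcache.memoize key used here (and the ram.cache key used by
    # _fill_dates_evo) stable regardless of dict ordering.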

    def _update_criteria_with_filters(self, query, section_name):
        """
        This method updates the 'query' dictionary with the criteria stored
        in the dashboard cookie.

        :param query: A dictionary with search criteria.
        :param section_name: The dashboard section name
        :return: The 'query' dictionary
        """
        if self.dashboard_cookie is None:
            return query
        cookie_criteria = self.dashboard_cookie.get(section_name)
        if cookie_criteria == 'mine':
            query['Creator'] = self.member.getId()
        return query
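
    # Illustrative example; the section and user id are assumptions: with
    # the cookie set to {'worksheets': 'mine'}, a query like
    # {'portal_type': 'Worksheet'} becomes
    # {'portal_type': 'Worksheet', 'Creator': 'analyst1'}, while the 'all'
    # filter leaves the query untouched.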

    def get_dashboard_panels_visibility(self, section_name):
        """
        Returns a list of pairs that represent the role-permission view
        relation for the panel section.
        :param section_name: the panels section id.
        :return: a list of tuples.
        """
        return get_dashboard_panels_visibility_by_section(section_name)


class DashboardViewPermissionUpdate(BrowserView):
    """
    Updates the values in 'bika.lims.dashboard_panels_visibility' registry.
    """

    def __call__(self):
        protect.CheckAuthenticator(self.request)
        # Getting values from post
        section_name = self.request.get('section_name', None)
        if section_name is None:
            return None
        role_id = self.request.get('role_id', None)
        if role_id is None:
            return None
        check_state = self.request.get('check_state', None)
        if check_state is None:
            return None
        elif check_state == 'false':
            check_state = 'no'
        else:
            check_state = 'yes'
        # Update registry
        registry_info = get_dashboard_registry_record()
        pairs = get_dashboard_panels_visibility_by_section(section_name)
        role_permissions = list()
        for pair in pairs:
            visibility = pair[1]
            if pair[0] == role_id:
                visibility = check_state
            value = '{0},{1}'.format(pair[0], visibility)
            role_permissions.append(value)
        role_permissions = ','.join(role_permissions)
        # Set permissions string into dict
        registry_info[section_name] = get_unicode(role_permissions)
        set_dashboard_registry_record(registry_info)
        return True