# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by its authors.
# Some rights reserved, see README and LICENSE.

import collections
import datetime
import json
from calendar import monthrange
from operator import itemgetter
from time import time

from DateTime import DateTime
from Products.Archetypes.public import DisplayList
from Products.CMFCore.utils import getToolByName
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.api import get_tool
from bika.lims.api import search
from bika.lims.browser import BrowserView
from bika.lims.catalog import CATALOG_ANALYSIS_LISTING
from bika.lims.catalog import CATALOG_ANALYSIS_REQUEST_LISTING
from bika.lims.catalog import CATALOG_WORKSHEET_LISTING
from bika.lims.utils import get_strings
from bika.lims.utils import get_unicode
from plone import api
from plone import protect
from plone.api.exc import InvalidParameterError
from plone.memoize import ram
from plone.memoize import view as viewcache

DASHBOARD_FILTER_COOKIE = 'dashboard_filter_cookie'

# Supported periodicities for evolution charts
PERIODICITY_DAILY = "d"
PERIODICITY_WEEKLY = "w"
PERIODICITY_MONTHLY = "m"
PERIODICITY_QUARTERLY = "q"
PERIODICITY_BIANNUAL = "b"
PERIODICITY_YEARLY = "y"
PERIODICITY_ALL = "a"
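# Note: DASHBOARD_FILTER_COOKIE above stores a JSON mapping of section id to
# the selected filter option, e.g. {"analyses": "all", "worksheets": "mine"}
# (see DashboardView._create_raw_data and _update_criteria_with_filters).
# The single-letter periodicity codes are read from the "p" request parameter
# in DashboardView.__call__.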


def get_dashboard_registry_record():
    """
    Return the 'bika.lims.dashboard_panels_visibility' values.
    :return: A dictionary or None
    """
    try:
        registry = api.portal.get_registry_record(
            'bika.lims.dashboard_panels_visibility')
        return registry
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")
    return dict()


def set_dashboard_registry_record(registry_info):
    """
    Sets the 'bika.lims.dashboard_panels_visibility' values.

    :param registry_info: A dictionary type object with all its values as
    *unicode* objects.
    :return: A dictionary or None
    """
    try:
        api.portal.set_registry_record(
            'bika.lims.dashboard_panels_visibility', registry_info)
    except InvalidParameterError:
        # No entry in the registry for dashboard panels roles.
        # Maybe upgradestep 1.1.8 was not run?
        logger.warn("Cannot find a record with name "
                    "'bika.lims.dashboard_panels_visibility' in "
                    "registry_record. Missed upgrade 1.1.8?")


def setup_dashboard_panels_visibility_registry(section_name):
    """
    Initializes the values for panels visibility in registry_records. By
    default, only users with LabManager or Manager roles can see the panels.
    :param section_name: the panels section id.
    :return: the updated registry dictionary. The value stored for the section
        is a string like: "role1,yes,role2,no,role3,no"
    """
    registry_info = get_dashboard_registry_record()
    role_permissions_list = []
    # Getting roles defined in the system
    roles = []
    acl_users = get_tool("acl_users")
    roles_tree = acl_users.portal_role_manager.listRoleIds()
    for role in roles_tree:
        roles.append(role)
    # Set view permissions to each role as 'yes':
    # "role1,yes,role2,no,role3,no"
    for role in roles:
        role_permissions_list.append(role)
        visible = 'no'
        if role in ['LabManager', 'Manager']:
            visible = 'yes'
        role_permissions_list.append(visible)
    role_permissions = ','.join(role_permissions_list)

    # Set permissions string into dict
    registry_info[get_unicode(section_name)] = get_unicode(role_permissions)
    # Set new values to registry record
    set_dashboard_registry_record(registry_info)
    return registry_info


def get_dashboard_panels_visibility_by_section(section_name):
    """
    Return a list of pairs as values that represents the role-permission
    view relation for the panel section passed in.
    :param section_name: the panels section id.
    :return: a list of tuples.
    """
    registry_info = get_dashboard_registry_record()
    if section_name not in registry_info:
        # Registry hasn't been set, do it at least for this section
        registry_info = \
            setup_dashboard_panels_visibility_registry(section_name)

    pairs = registry_info.get(section_name)
    pairs = get_strings(pairs)
    if pairs is None:
        # In the registry, but with None value?
        setup_dashboard_panels_visibility_registry(section_name)
        return get_dashboard_panels_visibility_by_section(section_name)

    pairs = pairs.split(',')
    if len(pairs) == 0 or len(pairs) % 2 != 0:
        # Non-valid or malformed value
        setup_dashboard_panels_visibility_registry(section_name)
        return get_dashboard_panels_visibility_by_section(section_name)

    result = [
        (pairs[i], pairs[i + 1]) for i in range(len(pairs)) if i % 2 == 0]
    return result
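# For example, with a stored value of u"LabManager,yes,Analyst,no" for a
# section, get_dashboard_panels_visibility_by_section returns
# [('LabManager', 'yes'), ('Analyst', 'no')].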


def is_panel_visible_for_user(panel, user):
    """
    Checks if the user is allowed to see the panel
    :param panel: panel ID as string
    :param user: a MemberData object
    :return: Boolean
    """
    roles = user.getRoles()
    visibility = get_dashboard_panels_visibility_by_section(panel)
    for pair in visibility:
        if pair[0] in roles and pair[1] == 'yes':
            return True
    return False
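# Usage example: is_panel_visible_for_user('analyses', api.user.get_current())
# returns True only when one of the current user's roles is flagged with 'yes'
# for that section (see DashboardView.get_sections below).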


class DashboardView(BrowserView):
    template = ViewPageTemplateFile("templates/dashboard.pt")

    def __init__(self, context, request):
        BrowserView.__init__(self, context, request)
        self.dashboard_cookie = None
        self.member = None

    def __call__(self):
        frontpage_url = self.portal_url + "/senaite-frontpage"
        if not self.context.bika_setup.getDashboardByDefault():
            # Do not render dashboard, render frontpage instead
            self.request.response.redirect(frontpage_url)
            return

        mtool = getToolByName(self.context, 'portal_membership')
        if mtool.isAnonymousUser():
            # Anonymous user, redirect to frontpage
            self.request.response.redirect(frontpage_url)
            return

        self.member = mtool.getAuthenticatedMember()
        self.periodicity = self.request.get('p', PERIODICITY_WEEKLY)
        self.dashboard_cookie = self.check_dashboard_cookie()
        date_range = self.get_date_range(self.periodicity)
        self.date_from = date_range[0]
        self.date_to = date_range[1]

        return self.template()

    def check_dashboard_cookie(self):
        """
        Gets the dashboard cookie with the currently selected filters.

        If the cookie doesn't exist yet, the function creates it with all
        values set to their defaults, stores it in the response and returns
        the raw data. Otherwise, it returns the values stored in the cookie.

        :return: a dictionary of strings
        """
        # Getting cookie
        cookie_raw = self.request.get(DASHBOARD_FILTER_COOKIE, None)
        # If it doesn't exist, create it with default values
        if cookie_raw is None:
            cookie_raw = self._create_raw_data()
            self.request.response.setCookie(
                DASHBOARD_FILTER_COOKIE,
                json.dumps(cookie_raw),
                quoted=False,
                path='/')
            return cookie_raw
        return get_strings(json.loads(cookie_raw))

    def is_filter_selected(self, selection_id, value):
        """
        Compares whether the 'selection_id' parameter value saved in the
        cookie is the same value as the "value" parameter.

        :param selection_id: a string as a dashboard_cookie key.
        :param value: The value to compare against the value from
        dashboard_cookie key.
        :return: Boolean.
        """
        selected = self.dashboard_cookie.get(selection_id)
        return selected == value

    def is_admin_user(self):
        """
        Checks if the current user has a LabManager or Manager role.
        :return: Boolean
        """
        user = api.user.get_current()
        roles = user.getRoles()
        return "LabManager" in roles or "Manager" in roles

    def _create_raw_data(self):
        """
        Gathers the different section ids and creates the default cookie
        data.

        :return: A dictionary like:
            {'analyses':'all','analysisrequest':'all','worksheets':'all'}
        """
        result = {}
        for section in self.get_sections():
            result[section.get('id')] = 'all'
        return result

    def get_date_range(self, periodicity=PERIODICITY_WEEKLY):
        """Returns a date range (date from, date to) that suits the passed-in
        periodicity.

        :param periodicity: string that represents the periodicity
        :type periodicity: str
        :return: A date range
        :rtype: (DateTime, DateTime)
        """
        today = datetime.date.today()
        if periodicity == PERIODICITY_DAILY:
            # Daily, load last 30 days
            date_from = DateTime() - 30
            date_to = DateTime() + 1
            return date_from, date_to

        if periodicity == PERIODICITY_MONTHLY:
            # Monthly, load last 2 years
            min_year = today.year - 1 if today.month == 12 else today.year - 2
            min_month = 1 if today.month == 12 else today.month
            date_from = DateTime(min_year, min_month, 1)
            date_to = DateTime(today.year, today.month,
                               monthrange(today.year, today.month)[1],
                               23, 59, 59)
            return date_from, date_to

        if periodicity == PERIODICITY_QUARTERLY:
            # Quarterly, load last 4 years
            m = (((today.month - 1) / 3) * 3) + 1
            min_year = today.year - 4 if today.month == 12 else today.year - 5
            date_from = DateTime(min_year, m, 1)
            date_to = DateTime(today.year, m + 2,
                               monthrange(today.year, m + 2)[1], 23, 59,
                               59)
            return date_from, date_to

        if periodicity == PERIODICITY_BIANNUAL:
            # Biannual, load last 10 years
            m = (((today.month - 1) / 6) * 6) + 1
            min_year = today.year - 10 if today.month == 12 else today.year - 11
            date_from = DateTime(min_year, m, 1)
            date_to = DateTime(today.year, m + 5,
                               monthrange(today.year, m + 5)[1], 23, 59,
                               59)
            return date_from, date_to

        if periodicity in [PERIODICITY_YEARLY, PERIODICITY_ALL]:
            # Yearly or All time, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            date_from = DateTime(min_year, 1, 1)
            date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            return date_from, date_to

        # Default Weekly, load last six months
        year, weeknum, dow = today.isocalendar()
        min_year = today.year if today.month > 6 else today.year - 1
        min_month = today.month - 6 if today.month > 6 \
            else (today.month - 6) + 12
        date_from = DateTime(min_year, min_month, 1)
        date_to = DateTime() - dow + 7
        return date_from, date_to

    def get_sections(self):
        """ Returns an array with the sections to be displayed.
            Every section is a dictionary with the following structure:
                {'id': <section_identifier>,
                 'title': <section_title>,
                 'panels': <array of panels>}
        """
        sections = []
        user = api.user.get_current()
        if is_panel_visible_for_user('analyses', user):
            sections.append(self.get_analyses_section())
        if is_panel_visible_for_user('analysisrequests', user):
            sections.append(self.get_analysisrequests_section())
        if is_panel_visible_for_user('worksheets', user):
            sections.append(self.get_worksheets_section())
        return sections

    def get_filter_options(self):
        """
        Returns the dashboard filter options.
        :return: DisplayList
        """
        dash_opt = DisplayList((
            ('all', _('All')),
            ('mine', _('Mine')),
        ))
        return dash_opt

    def _getStatistics(self, name, description, url, catalog, criterias, total):
        out = {'type': 'simple-panel',
               'name': name,
               'class': 'informative',
               'description': description,
               'total': total,
               'link': self.portal_url + '/' + url}

        results = 0
        ratio = 0
        if total > 0:
            results = self.search_count(criterias, catalog.id)
            results = results if total >= results else total
            ratio = (float(results)/float(total))*100 if results > 0 else 0
        ratio = str("%%.%sf" % 1) % ratio
        out['legend'] = _('of') + " " + str(total) + ' (' + ratio + '%)'
        out['number'] = results
        out['percentage'] = float(ratio)
        return out

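    # Illustrative shape of a panel built by _getStatistics, assuming 12 of
    # 340 active samples match the given criteria:
    # {'type': 'simple-panel', 'name': u'Samples to be verified',
    #  'class': 'informative', 'description': u'To be verified',
    #  'total': 340, 'number': 12, 'percentage': 3.5,
    #  'legend': u'of 340 (3.5%)', 'link': '<portal_url>/analysisrequests?...'}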
    def get_analysisrequests_section(self):
        """ Returns the section dictionary related to Analysis
            Requests, that contains some informative panels (like
            ARs to be verified, ARs to be published, etc.)
        """
        out = []
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_REQUEST_LISTING)
        query = {'portal_type': "AnalysisRequest",
                 'is_active': True}

        # Check if dashboard_cookie contains any values to query
        # elements by
        query = self._update_criteria_with_filters(query, 'analysisrequests')

        # Active Samples (All)
        total = self.search_count(query, catalog.id)

        # Sampling workflow enabled?
        if self.context.bika_setup.getSamplingWorkflowEnabled():
            # Samples awaiting to be sampled or scheduled
            name = _('Samples to be sampled')
            desc = _("To be sampled")
            purl = 'samples?samples_review_state=to_be_sampled'
            query['review_state'] = ['to_be_sampled', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

            # Samples awaiting to be preserved
            name = _('Samples to be preserved')
            desc = _("To be preserved")
            purl = 'samples?samples_review_state=to_be_preserved'
            query['review_state'] = ['to_be_preserved', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

            # Samples scheduled for Sampling
            name = _('Samples scheduled for sampling')
            desc = _("Sampling scheduled")
            purl = 'samples?samples_review_state=scheduled_sampling'
            query['review_state'] = ['scheduled_sampling', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Samples awaiting reception
        name = _('Samples to be received')
        desc = _("Reception pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_due'
        query['review_state'] = ['sample_due', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Samples under way
        name = _('Samples with results pending')
        desc = _("Results pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_received'
        query['review_state'] = ['attachment_due',
                                 'sample_received', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Samples to be verified
        name = _('Samples to be verified')
        desc = _("To be verified")
        purl = 'analysisrequests?analysisrequests_review_state=to_be_verified'
        query['review_state'] = ['to_be_verified', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Samples verified (to be published)
        name = _('Samples verified')
        desc = _("Verified")
        purl = 'analysisrequests?analysisrequests_review_state=verified'
        query['review_state'] = ['verified', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Samples published
        name = _('Samples published')
        desc = _("Published")
        purl = 'analysisrequests?analysisrequests_review_state=published'
        query['review_state'] = ['published', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Samples to be printed
        if self.context.bika_setup.getPrintingWorkflowEnabled():
            name = _('Samples to be printed')
            desc = _("To be printed")
            purl = 'analysisrequests?analysisrequests_getPrinted=0'
            query['getPrinted'] = '0'
            query['review_state'] = ['published', ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

        # Chart with the evolution of ARs over a period, grouped by
        # periodicity
        outevo = self.fill_dates_evo(catalog, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Samples'),
                    'class': 'informative',
                    'description': _('Evolution of Samples'),
                    'data': json.dumps(outevo),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'analysisrequests',
                'title': _('Samples'),
                'panels': out}

    def get_worksheets_section(self):
        """ Returns the section dictionary related to Worksheets,
            that contains some informative panels (like
            WS to be verified, WS with results pending, etc.)
        """
        out = []
        bc = getToolByName(self.context, CATALOG_WORKSHEET_LISTING)
        query = {'portal_type': "Worksheet", }

        # Check if dashboard_cookie contains any values to query
        # elements by
        query = self._update_criteria_with_filters(query, 'worksheets')

        # Active Worksheets (all)
        total = self.search_count(query, bc.id)

        # Open worksheets
        name = _('Results pending')
        desc = _('Results pending')
        purl = 'worksheets?list_review_state=open'
        query['review_state'] = ['open', 'attachment_due']
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Worksheets to be verified
        name = _('To be verified')
        desc = _('To be verified')
        purl = 'worksheets?list_review_state=to_be_verified'
        query['review_state'] = ['to_be_verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Worksheets verified
        name = _('Verified')
        desc = _('Verified')
        purl = 'worksheets?list_review_state=verified'
        query['review_state'] = ['verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Chart with the evolution of WSs over a period, grouped by
        # periodicity
        outevo = self.fill_dates_evo(bc, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Worksheets'),
                    'class': 'informative',
                    'description': _('Evolution of Worksheets'),
                    'data': json.dumps(outevo),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'worksheets',
                'title': _('Worksheets'),
                'panels': out}

    def get_analyses_section(self):
        """ Returns the section dictionary related to Analyses,
            that contains some informative panels (analyses pending,
            analyses assigned, etc.)
        """
        out = []
        bc = getToolByName(self.context, CATALOG_ANALYSIS_LISTING)
        query = {'portal_type': "Analysis", 'is_active': True}

        # Check if dashboard_cookie contains any values to query elements by
        query = self._update_criteria_with_filters(query, 'analyses')

        # Active Analyses (All)
        total = self.search_count(query, bc.id)

        # Analyses to be assigned
        name = _('Assignment pending')
        desc = _('Assignment pending')
        purl = '#'
        query['review_state'] = ['unassigned']
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Analyses pending
        name = _('Results pending')
        desc = _('Results pending')
        purl = '#'
        query['review_state'] = ['unassigned', 'assigned', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Analyses to be verified
        name = _('To be verified')
        desc = _('To be verified')
        purl = '#'
        query['review_state'] = ['to_be_verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Analyses verified
        name = _('Verified')
        desc = _('Verified')
        purl = '#'
        query['review_state'] = ['verified', ]
        out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Chart with the evolution of Analyses over a period, grouped by
        # periodicity
        outevo = self.fill_dates_evo(bc, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Analyses'),
                    'class': 'informative',
                    'description': _('Evolution of Analyses'),
                    'data': json.dumps(outevo),
                    'datacolors': json.dumps(self.get_colors_palette())})
        return {'id': 'analyses',
                'title': _('Analyses'),
                'panels': out}

    def get_states_map(self, portal_type):
        if portal_type == 'Analysis':
            return {'unassigned': _('Assignment pending'),
                    'assigned': _('Results pending'),
                    'to_be_verified': _('To be verified'),
                    'rejected': _('Rejected'),
                    'retracted': _('Retracted'),
                    'verified': _('Verified'),
                    'published': _('Published')}
        elif portal_type == 'AnalysisRequest':
            return {'to_be_sampled': _('To be sampled'),
                    'to_be_preserved': _('To be preserved'),
                    'scheduled_sampling': _('Sampling scheduled'),
                    'sample_due': _('Reception pending'),
                    'rejected': _('Rejected'),
                    'invalid': _('Invalid'),
                    'sample_received': _('Results pending'),
                    'assigned': _('Results pending'),
                    'attachment_due': _('Results pending'),
                    'to_be_verified': _('To be verified'),
                    'verified': _('Verified'),
                    'published': _('Published')}
        elif portal_type == 'Worksheet':
            return {'open': _('Results pending'),
                    'attachment_due': _('Results pending'),
                    'to_be_verified': _('To be verified'),
                    'verified': _('Verified')}

    def get_colors_palette(self):
        return {
            'to_be_sampled': '#917A4C',
            _('To be sampled'): '#917A4C',

            'to_be_preserved': '#C2803E',
            _('To be preserved'): '#C2803E',

            'scheduled_sampling': '#F38630',
            _('Sampling scheduled'): '#F38630',

            'sample_due': '#FA6900',
            _('Reception pending'): '#FA6900',

            'sample_received': '#E0E4CC',
            _('Assignment pending'): '#E0E4CC',
            _('Sample received'): '#E0E4CC',

            'assigned': '#dcdcdc',
            'attachment_due': '#dcdcdc',
            'open': '#dcdcdc',
            _('Results pending'): '#dcdcdc',

            'rejected': '#FF6B6B',
            'retracted': '#FF6B6B',
            _('Rejected'): '#FF6B6B',
            _('Retracted'): '#FF6B6B',

            'invalid': '#C44D58',
            _('Invalid'): '#C44D58',

            'to_be_verified': '#A7DBD8',
            _('To be verified'): '#A7DBD8',

            'verified': '#69D2E7',
            _('Verified'): '#69D2E7',

            'published': '#83AF9B',
            _('Published'): '#83AF9B',
        }

    def _getDateStr(self, period, created):
        if period == PERIODICITY_YEARLY:
            created = created.year()
        elif period == PERIODICITY_BIANNUAL:
            m = (((created.month()-1)/6)*6)+1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == PERIODICITY_QUARTERLY:
            m = (((created.month()-1)/3)*3)+1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == PERIODICITY_MONTHLY:
            created = '%s-%s' % (str(created.year())[2:], str(created.month()).zfill(2))
        elif period == PERIODICITY_WEEKLY:
            d = (((created.day()-1)/7)*7)+1
            year, weeknum, dow = created.asdatetime().isocalendar()
            created = created - dow
            created = '%s-%s-%s' % (str(created.year())[2:], str(created.month()).zfill(2), str(created.day()).zfill(2))
        elif period == PERIODICITY_ALL:
            # All time, but evolution chart grouped by year
            created = created.year()
        else:
            created = '%s-%s-%s' % (str(created.year())[2:], str(created.month()).zfill(2), str(created.day()).zfill(2))
        return created

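    # Illustrative keys produced by _getDateStr for a DateTime of 12 March
    # 2019:
    #   yearly / all time     -> 2019 (integer year)
    #   biannual / quarterly  -> '19-01' (first month of the period)
    #   monthly               -> '19-03'
    #   weekly                -> '19-03-10' (the Sunday preceding the date)
    #   daily (default)       -> '19-03-12'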
    def fill_dates_evo(self, catalog, query):
        sorted_query = collections.OrderedDict(sorted(query.items()))
        query_json = json.dumps(sorted_query)
        return self._fill_dates_evo(query_json, catalog.id, self.periodicity)

    def _fill_dates_evo_cachekey(method, self, query_json, catalog_name,
                                 periodicity):
        hour = time() // (60 * 60 * 2)
        return hour, catalog_name, query_json, periodicity

    @ram.cache(_fill_dates_evo_cachekey)
    def _fill_dates_evo(self, query_json, catalog_name, periodicity):
        """Returns an array of dictionaries, where each dictionary contains the
        amount of items created at a given date and grouped by review_state,
        based on the passed in periodicity.

        This is an expensive function that will not be called more than once
        every 2 hours (note the cache key is built from
        `time() // (60 * 60 * 2)`).
        """
        outevoidx = {}
        outevo = []
        days = 1
        if periodicity == PERIODICITY_YEARLY:
            days = 336
        elif periodicity == PERIODICITY_BIANNUAL:
            days = 168
        elif periodicity == PERIODICITY_QUARTERLY:
            days = 84
        elif periodicity == PERIODICITY_MONTHLY:
            days = 28
        elif periodicity == PERIODICITY_WEEKLY:
            days = 7
        elif periodicity == PERIODICITY_ALL:
            days = 336

        # Get the date range
        date_from, date_to = self.get_date_range(periodicity)
        query = json.loads(query_json)
        if 'review_state' in query:
            del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = {'query': (date_from, date_to),
                            'range': 'min:max'}

        otherstate = _('Other status')
        statesmap = self.get_states_map(query['portal_type'])
        stats = statesmap.values()
        stats.sort()
        stats.append(otherstate)
        statscount = {s: 0 for s in stats}
        # Add all periods first, because we want all segments to be displayed
        curr = date_from.asdatetime()
        end = date_to.asdatetime()
        while curr < end:
            currstr = self._getDateStr(periodicity, DateTime(curr))
            if currstr not in outevoidx:
                outdict = {'date': currstr}
                for k in stats:
                    outdict[k] = 0
                outevo.append(outdict)
                outevoidx[currstr] = len(outevo)-1
            curr = curr + datetime.timedelta(days=days)

        brains = search(query, catalog_name)
        for brain in brains:
            created = brain.created
            state = brain.review_state
            if state not in statesmap:
                logger.warn("'%s' State for '%s' not available" % (state, query['portal_type']))
            state = statesmap[state] if state in statesmap else otherstate
            created = self._getDateStr(periodicity, created)
            statscount[state] += 1
            if created in outevoidx:
                oidx = outevoidx[created]
                if state in outevo[oidx]:
                    outevo[oidx][state] += 1
                else:
                    outevo[oidx][state] = 1
            else:
                # Create new row
                currow = {'date': created,
                          state: 1}
                outevo.append(currow)

        # Remove all those states for which there is no data
        rstates = [k for k, v in statscount.items() if v == 0]
        for o in outevo:
            for r in rstates:
                if r in o:
                    del o[r]

        # Sort available statuses by number of occurrences, descending
        sorted_states = sorted(statscount.items(), key=itemgetter(1))
        sorted_states = map(lambda item: item[0], sorted_states)
        sorted_states.reverse()
        return {'data': outevo, 'states': sorted_states}
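    # Illustrative shape of the value returned by _fill_dates_evo:
    # {'data': [{'date': '19-01', u'Results pending': 5, u'Verified': 2},
    #           {'date': '19-02', u'Results pending': 3, u'Verified': 7}, ...],
    #  'states': [u'Results pending', u'Verified', ...]}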

    def search_count(self, query, catalog_name):
        sorted_query = collections.OrderedDict(sorted(query.items()))
        query_json = json.dumps(sorted_query)
        return self._search_count(query_json, catalog_name)

    @viewcache.memoize
    def _search_count(self, query_json, catalog_name):
        query = json.loads(query_json)
        brains = search(query, catalog_name)
        return len(brains)
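    # Note: queries are serialized to sorted JSON so that logically equal
    # queries share the same cache key; _search_count is memoized for the
    # lifetime of the request (plone.memoize view), so repeated counts with
    # the same criteria hit the catalog only once per request.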

    def _update_criteria_with_filters(self, query, section_name):
        """
        This method updates the 'query' dictionary with the criteria stored
        in the dashboard cookie.

        :param query: A dictionary with search criteria.
        :param section_name: The dashboard section name
        :return: The 'query' dictionary
        """
        if self.dashboard_cookie is None:
            return query
        cookie_criteria = self.dashboard_cookie.get(section_name)
        if cookie_criteria == 'mine':
            query['Creator'] = self.member.getId()
        return query
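    # For example, with a cookie value of 'mine' for the 'analyses' section,
    # the query gains {'Creator': <current member id>} and the panel counters
    # only consider objects created by the authenticated user.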

    def get_dashboard_panels_visibility(self, section_name):
        """
        Return a list of pairs as values that represents the role-permission
        view relation for the panel section.
        :param section_name: the panels section id.
        :return: a list of tuples.
        """
        return get_dashboard_panels_visibility_by_section(section_name)


class DashboardViewPermissionUpdate(BrowserView):
    """
    Updates the values in 'bika.lims.dashboard_panels_visibility' registry.
    """

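    # Expected request parameters (see __call__ below): 'section_name' (the
    # panel section id), 'role_id' (the role to update) and 'check_state'
    # ('false' is stored as 'no', anything else as 'yes').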
    def __call__(self):
        protect.CheckAuthenticator(self.request)
        # Getting values from post
        section_name = self.request.get('section_name', None)
        if section_name is None:
            return None
        role_id = self.request.get('role_id', None)
        if role_id is None:
            return None
        check_state = self.request.get('check_state', None)
        if check_state is None:
            return None
        elif check_state == 'false':
            check_state = 'no'
        else:
            check_state = 'yes'
        # Update registry
        registry_info = get_dashboard_registry_record()
        pairs = get_dashboard_panels_visibility_by_section(section_name)
        role_permissions = list()
        for pair in pairs:
            visibility = pair[1]
            if pair[0] == role_id:
                visibility = check_state
            value = '{0},{1}'.format(pair[0], visibility)
            role_permissions.append(value)
        role_permissions = ','.join(role_permissions)
        # Set permissions string into dict
        registry_info[section_name] = get_unicode(role_permissions)
        set_dashboard_registry_record(registry_info)
        return True