# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE
#
# Copyright 2018 by its authors.
# Some rights reserved. See LICENSE.rst, CONTRIBUTORS.rst.

import magnitude
from Products.CMFPlone.utils import _createObjectByType
from Products.CMFPlone.utils import safe_unicode
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from plone.memoize.volatile import cache
from zope.interface import implements
from zope.publisher.interfaces import IPublishTraverse

from bika.lims import api
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.browser.base_add_view import BaseAddView
from bika.lims.browser.base_add_view import BaseAjaxAddView
from bika.lims.browser.base_add_view import BaseManageAddView
from bika.lims.utils import cache_key
from bika.lims.utils import tmpID
from bika.lims.utils.analysisrequest import create_analysisrequest as crar


def mg(value):
    """Copied from bika.lims.jsonapi.v1.calculate_partitions
    """
    tokens = value.split(" ") if value else [0, '']
    val = float(tokens[0]) if isinstance(tokens[0], (int, long)) else 0
    unit = tokens[1] if len(tokens) > 1 else ''
    # Magnitude doesn't support mL units.
    # Since mL is commonly used instead of ml to avoid confusion with the
    # number one, add "L" (for liter) as a 'recognizable' unit.
    # L unit as liter is also recommended by the NIST Guide
    # http://physics.nist.gov/Pubs/SP811/sec05.html#table6
    # Further info: https://jira.bikalabs.com/browse/LIMS-1441
    unit = unit[:-1] + 'l' if unit.endswith('L') else unit
    return magnitude.mg(val, unit)


class AnalysisRequestAddView(BaseAddView):
    """AR Add view
    """
    template = ViewPageTemplateFile("templates/ar_add2.pt")

    def __init__(self, context, request):
        BaseAddView.__init__(self, context, request)
        self.SKIP_FIELD_ON_COPY = ["Sample"]

    def __call__(self):
        BaseAddView.__call__(self)
        self.icon = self.portal_url + \
            "/++resource++bika.lims.images/analysisrequest_big.png"
        self.fieldvalues = self.generate_fieldvalues(self.obj_count)
        self.specifications = self.generate_specifications(self.obj_count)
        logger.info("*** Prepared data for {} ARs ***".format(self.obj_count))
        return self.template()

    def get_object_by_uid(self, uid):
        """Get the object by UID
        """
        logger.debug("get_object_by_uid::UID={}".format(uid))
        obj = api.get_object_by_uid(uid, None)
        if obj is None:
            logger.warn("!! No object found for UID #{} !!".format(uid))
        return obj

    def is_ar_specs_allowed(self):
        """Checks if AR Specs are allowed
        """
        bika_setup = api.get_bika_setup()
        return bika_setup.getEnableARSpecs()

    def get_drymatter_service(self):
        """The analysis to be used for determining dry matter
        """
        bika_setup = api.get_bika_setup()
        return bika_setup.getDryMatterService()

    def get_obj(self):
        """Create a temporary AR to fetch the fields from
        """
        if not self.tmp_obj:
            logger.info("*** CREATING TEMPORARY AR ***")
            self.tmp_obj = self.context.restrictedTraverse(
                "portal_factory/AnalysisRequest/Request new analyses")
        return self.tmp_obj

    def generate_specifications(self, count=1):
        """Returns a mapping of count -> specification
        """

        out = {}

        # mapping of UID index to AR objects {1: <AR1>, 2: <AR2> ...}
        copy_from = self.get_copy_from()

        for arnum in range(count):
            # get the source object
            source = copy_from.get(arnum)

            if source is None:
                out[arnum] = {}
                continue

            # get the results range from the source object
            results_range = source.getResultsRange()

            # mapping of keyword -> rr specification
            specification = {}
            for rr in results_range:
                specification[rr.get("keyword")] = rr
            out[arnum] = specification

        return out

    def get_parent_ar(self, ar):
        """Returns the parent AR
        """
        parent = ar.getParentAnalysisRequest()

        # Return immediately if we have no parent
        if parent is None:
            return None

        # Walk back the chain until we reach the source AR
        while True:
            pparent = parent.getParentAnalysisRequest()
            if pparent is None:
                break
            # remember the new parent
            parent = pparent

        return parent

    def generate_fieldvalues(self, count=1):
        """Returns a mapping of '<fieldname>-<count>' to the default value
        of the field or the field value of the source AR
        """
        ar_context = self.get_obj()

        # mapping of UID index to AR objects {1: <AR1>, 2: <AR2> ...}
        copy_from = self.get_copy_from()

        out = {}
        # the original schema fields of an AR (including extended fields)
        fields = self.get_obj_fields()

        # generate fields for all requested ARs
        for arnum in range(count):
            source = copy_from.get(arnum)
            parent = None
            if source is not None:
                parent = self.get_parent_ar(source)
            for field in fields:
                value = None
                fieldname = field.getName()
                if source and fieldname not in self.SKIP_FIELD_ON_COPY:
                    # get the field value stored on the source
                    context = parent or source
                    value = self.get_field_value(field, context)
                else:
                    # get the default value of this field
                    value = self.get_default_value(field, ar_context)
                # store the value on the new fieldname
                new_fieldname = self.get_fieldname(field, arnum)
                out[new_fieldname] = value

        return out

    def get_default_contact(self):
        """Logic refactored from JavaScript:

        * If the client only has one contact and the analysis request comes
          from a client, then auto-complete the first Contact field.
        * If the client only has one contact and the analysis request comes
          from a batch, then auto-complete all Contact fields.

        :returns: The default contact for the AR
        :rtype: Contact object or None
        """
        catalog = api.get_tool("portal_catalog")
        client = self.get_client()
        path = api.get_path(self.context)
        if client:
            path = api.get_path(client)
        query = {
            "portal_type": "Contact",
            "path": {
                "query": path,
                "depth": 1
            },
            "inactive_state": "active",
        }
        contacts = catalog(query)
        if len(contacts) == 1:
            return api.get_object(contacts[0])
        return None

    def getMemberDiscountApplies(self):
        """Return if the member discount applies for this client

        :returns: True if member discount applies for the client
        :rtype: bool
        """
        client = self.get_client()
        if client is None:
            return False
        return client.getMemberDiscountApplies()

    def get_fields_with_visibility(self, visibility, mode="add"):
        """Return the AR fields with the current visibility
        """
        ar = self.get_obj()
        mv = api.get_view("ar_add_manage", context=ar)
        mv.get_field_order()

        out = []
        for field in mv.get_fields_with_visibility(visibility, mode):
            # check custom field condition
            visible = self.is_field_visible(field)
            if visible is False and visibility != "hidden":
                continue
            out.append(field)
        return out

    def get_service_categories(self, restricted=True):
        """Return all service categories in the right order

        :param restricted: Client settings restrict categories
        :type restricted: bool
        :returns: Category catalog results
        :rtype: brains
        """
        bsc = api.get_tool("bika_setup_catalog")
        query = {
            "portal_type": "AnalysisCategory",
            "inactive_state": "active",
            "sort_on": "sortable_title",
        }
        categories = bsc(query)
        client = self.get_client()
        if client and restricted:
            restricted_categories = client.getRestrictedCategories()
            restricted_category_ids = map(lambda c: c.getId(), restricted_categories)
            # keep correct order of categories
            if restricted_category_ids:
                categories = filter(lambda c: c.getId in restricted_category_ids, categories)
        return categories

    def get_services(self, poc="lab"):
        """Return all Services

        :param poc: Point of capture (lab/field)
        :type poc: string
        :returns: Mapping of category -> list of services
        :rtype: dict
        """
        bsc = api.get_tool("bika_setup_catalog")
        query = {
            "portal_type": "AnalysisService",
            "getPointOfCapture": poc,
            "inactive_state": "active",
            "sort_on": "sortable_title",
        }
        services = bsc(query)
        categories = self.get_service_categories(restricted=False)
        analyses = {key: [] for key in map(lambda c: c.Title, categories)}

        # append the empty category as well
        analyses[""] = []

        for brain in services:
            category = brain.getCategoryTitle
            if category in analyses:
                analyses[category].append(brain)
        return analyses

    @cache(cache_key)
    def get_service_uid_from(self, analysis):
        """Return the service from the analysis
        """
        analysis = api.get_object(analysis)
        return api.get_uid(analysis.getAnalysisService())

    def get_calculation_dependencies_for(self, service):
        """Calculation dependencies of this service and the calculation of each
        dependent service (recursively).

        TODO: This needs to go to bika.lims.api
        """

        def calc_dependencies_gen(service, collector=None):
            """Generator for recursive dependency resolution.
            """

            # The UID of the service
            service_uid = api.get_uid(service)

            # maintain an internal dependency mapping
            if collector is None:
                collector = {}

            # Stop iteration if we processed this service already
            if service_uid in collector:
                raise StopIteration

            # Get the calculation of the service.
            # The calculation comes either from an assigned method or the user
            # has set a calculation manually (see content/analysisservice.py).
            calculation = service.getCalculation()

            # Stop iteration if there is no calculation
            if not calculation:
                raise StopIteration

            # The services used in this calculation.
            # These are the actual dependencies of the used formula.
            dep_services = calculation.getDependentServices()
            for dep_service in dep_services:
                # get the UID of the dependent service
                dep_service_uid = api.get_uid(dep_service)

                # remember the dependent service
                collector[dep_service_uid] = dep_service

                # yield the dependent service
                yield dep_service

                # check the dependencies of the dependent services
                for ddep_service in calc_dependencies_gen(dep_service,
                                                          collector=collector):
                    yield ddep_service

        dependencies = {}
        for dep_service in calc_dependencies_gen(service):
            # Skip the initial (requested) service
            if dep_service == service:
                continue
            uid = api.get_uid(dep_service)
            dependencies[uid] = dep_service

        return dependencies

    def get_calculation_dependants_for(self, service):
        """Calculation dependants of this service

        TODO: This needs to go to bika.lims.api
        """

        def calc_dependants_gen(service, collector=None):
            """Generator for recursive resolution of dependant services.
            """

            # The UID of the service
            service_uid = api.get_uid(service)

            # maintain an internal dependency mapping
            if collector is None:
                collector = {}

            # Stop iteration if we processed this service already
            if service_uid in collector:
                raise StopIteration

            # Get the dependant calculations of the service
            # (calculations that use the service in their formula).
            dep_calcs = service.getBackReferences('CalculationAnalysisService')
            for dep_calc in dep_calcs:
                # Get the methods linked to this calculation
                dep_methods = dep_calc.getBackReferences('MethodCalculation')
                for dep_method in dep_methods:
                    # Get the services that have this method linked
                    dep_services = dep_method.getBackReferences('AnalysisServiceMethod')
                    for dep_service in dep_services:

                        # get the UID of the dependent service
                        dep_service_uid = api.get_uid(dep_service)

                        # skip services with a different calculation, e.g. when
                        # the user selected a calculation manually.
                        if dep_service.getCalculation() != dep_calc:
                            continue

                        # remember the dependent service
                        collector[dep_service_uid] = dep_service

                        # yield the dependent service
                        yield dep_service

                        # check the dependants of the dependant services
                        for ddep_service in calc_dependants_gen(dep_service,
                                                                collector=collector):
                            yield ddep_service

        dependants = {}
        for dep_service in calc_dependants_gen(service):
            # Skip the initial (requested) service
            if dep_service == service:
                continue
            uid = api.get_uid(dep_service)
            dependants[uid] = dep_service

        return dependants

    def get_service_dependencies_for(self, service):
        """Calculate the dependencies for the given service.
        """

        dependants = self.get_calculation_dependants_for(service)
        dependencies = self.get_calculation_dependencies_for(service)

        return {
            "dependencies": dependencies.values(),
            "dependants": dependants.values(),
        }

    def is_service_selected(self, service):
        """Checks if the given service is selected by one of the ARs.
        This is used to make the whole line visible or not.
        """
        service_uid = api.get_uid(service)
        for arnum in range(self.ar_count):
            analyses = self.fieldvalues.get("Analyses-{}".format(arnum))
            if not analyses:
                continue
            service_uids = map(self.get_service_uid_from, analyses)
            if service_uid in service_uids:
                return True
        return False


class AnalysisRequestManageView(BaseManageAddView):
    """AR Manage View
    """
    template = ViewPageTemplateFile("templates/ar_add_manage.pt")

    def __init__(self, context, request):
        BaseManageAddView.__init__(self, context, request)
        self.CONFIGURATION_STORAGE = \
            "bika.lims.browser.analysisrequest.manage.add"
        self.SKIP_FIELD_ON_COPY = ["Sample"]

    def __call__(self):
        BaseManageAddView.__call__(self)
        return self.template()

    def get_obj(self):
        if not self.tmp_obj:
            self.tmp_obj = self.context.restrictedTraverse(
                "portal_factory/AnalysisRequest/Request new analyses")
        return self.tmp_obj


class ajaxAnalysisRequestAddView(BaseAjaxAddView, AnalysisRequestAddView):
    """Ajax helpers for the analysis request add form
    """
    implements(IPublishTraverse)

    def __init__(self, context, request):
        AnalysisRequestAddView.__init__(self, context, request)
        BaseAjaxAddView.__init__(self, context, request)

    @cache(cache_key)
    def get_client_info(self, obj):
        """Returns the client info of an object
        """
        info = self.get_base_info(obj)
        info.update({})

        # UID of the client
        uid = api.get_uid(obj)

        # Bika Setup folder
        bika_setup = api.get_bika_setup()

        # bika samplepoints
        bika_samplepoints = bika_setup.bika_samplepoints
        bika_samplepoints_uid = api.get_uid(bika_samplepoints)

        # bika artemplates
        bika_artemplates = bika_setup.bika_artemplates
        bika_artemplates_uid = api.get_uid(bika_artemplates)

        # bika analysisprofiles
        bika_analysisprofiles = bika_setup.bika_analysisprofiles
        bika_analysisprofiles_uid = api.get_uid(bika_analysisprofiles)

        # bika analysisspecs
        bika_analysisspecs = bika_setup.bika_analysisspecs
        bika_analysisspecs_uid = api.get_uid(bika_analysisspecs)

        # catalog queries for UI field filtering
        filter_queries = {
            "contact": {
                "getParentUID": [uid]
            },
            "cc_contact": {
                "getParentUID": [uid]
            },
            "invoice_contact": {
                "getParentUID": [uid]
            },
            "samplepoint": {
                "getClientUID": [uid, bika_samplepoints_uid],
            },
            "artemplates": {
                "getClientUID": [uid, bika_artemplates_uid],
            },
            "analysisprofiles": {
                "getClientUID": [uid, bika_analysisprofiles_uid],
            },
            "analysisspecs": {
                "getClientUID": [uid, bika_analysisspecs_uid],
            },
            "samplinground": {
                "getParentUID": [uid],
            },
            "sample": {
                "getClientUID": [uid],
            },
        }
        info["filter_queries"] = filter_queries

        return info

    @cache(cache_key)
    def get_contact_info(self, obj):
        """Returns the contact info of an object
        """

        info = self.get_base_info(obj)
        fullname = obj.getFullname()
        email = obj.getEmailAddress()

        # Note: It might get a circular dependency when calling:
        # map(self.get_contact_info, obj.getCCContact())
        cccontacts = {}
        for contact in obj.getCCContact():
            uid = api.get_uid(contact)
            fullname = contact.getFullname()
            email = contact.getEmailAddress()
            cccontacts[uid] = {
                "fullname": fullname,
                "email": email
            }

        info.update({
            "fullname": fullname,
            "email": email,
            "cccontacts": cccontacts,
        })

        return info

    @cache(cache_key)
    def get_service_info(self, obj):
        """Returns the info for a Service
        """
        info = self.get_base_info(obj)

        info.update({
            "short_title": obj.getShortTitle(),
            "scientific_name": obj.getScientificName(),
            "unit": obj.getUnit(),
            "report_dry_matter": obj.getReportDryMatter(),
            "keyword": obj.getKeyword(),
            "methods": map(self.get_method_info, obj.getMethods()),
            "calculation": self.get_calculation_info(obj.getCalculation()),
            "price": obj.getPrice(),
            "currency_symbol": self.get_currency().symbol,
            "accredited": obj.getAccredited(),
            "category": obj.getCategoryTitle(),
            "poc": obj.getPointOfCapture(),
        })

        dependencies = self.get_calculation_dependencies_for(obj).values()
        info["dependencies"] = map(self.get_base_info, dependencies)
        # dependants = self.get_calculation_dependants_for(obj).values()
        # info["dependendants"] = map(self.get_base_info, dependants)
        return info

    @cache(cache_key)
    def get_template_info(self, obj):
        """Returns the info for a Template
        """
        client = self.get_client()
        client_uid = api.get_uid(client) if client else ""

        profile = obj.getAnalysisProfile()
        profile_uid = api.get_uid(profile) if profile else ""
        profile_title = profile.Title() if profile else ""

        sample_type = obj.getSampleType()
        sample_type_uid = api.get_uid(sample_type) if sample_type else ""
        sample_type_title = sample_type.Title() if sample_type else ""

        sample_point = obj.getSamplePoint()
        sample_point_uid = api.get_uid(sample_point) if sample_point else ""
        sample_point_title = sample_point.Title() if sample_point else ""

        service_uids = []
        analyses_partitions = {}
        analyses = obj.getAnalyses()

        for record in analyses:
            service_uid = record.get("service_uid")
            service_uids.append(service_uid)
            analyses_partitions[service_uid] = record.get("partition")

        info = self.get_base_info(obj)
        info.update({
            "analyses_partitions": analyses_partitions,
            "analysis_profile_title": profile_title,
            "analysis_profile_uid": profile_uid,
            "client_uid": client_uid,
            "composite": obj.getComposite(),
            "partitions": obj.getPartitions(),
            "remarks": obj.getRemarks(),
            "report_dry_matter": obj.getReportDryMatter(),
            "sample_point_title": sample_point_title,
            "sample_point_uid": sample_point_uid,
            "sample_type_title": sample_type_title,
            "sample_type_uid": sample_type_uid,
            "service_uids": service_uids,
        })
        return info

    @cache(cache_key)
    def get_profile_info(self, obj):
        """Returns the info for a Profile
        """
        info = self.get_base_info(obj)
        info.update({})
        return info

    @cache(cache_key)
    def get_method_info(self, obj):
        """Returns the info for a Method
        """
        info = self.get_base_info(obj)
        info.update({})
        return info

    @cache(cache_key)
    def get_calculation_info(self, obj):
        """Returns the info for a Calculation
        """
        info = self.get_base_info(obj)
        info.update({})
        return info

    @cache(cache_key)
    def get_sampletype_info(self, obj):
        """Returns the info for a Sample Type
        """
        info = self.get_base_info(obj)

        # Bika Setup folder
        bika_setup = api.get_bika_setup()

        # bika samplepoints
        bika_samplepoints = bika_setup.bika_samplepoints
        bika_samplepoints_uid = api.get_uid(bika_samplepoints)

        # bika analysisspecs
        bika_analysisspecs = bika_setup.bika_analysisspecs
        bika_analysisspecs_uid = api.get_uid(bika_analysisspecs)

        # client
        client = self.get_client()
        client_uid = client and api.get_uid(client) or ""

        # sample matrix
        sample_matrix = obj.getSampleMatrix()
        sample_matrix_uid = sample_matrix and sample_matrix.UID() or ""
        sample_matrix_title = sample_matrix and sample_matrix.Title() or ""

        # container type
        container_type = obj.getContainerType()
        container_type_uid = container_type and container_type.UID() or ""
        container_type_title = container_type and container_type.Title() or ""

        # sample points
        sample_points = obj.getSamplePoints()
        sample_point_uids = map(lambda sp: sp.UID(), sample_points)
        sample_point_titles = map(lambda sp: sp.Title(), sample_points)

        info.update({
            "prefix": obj.getPrefix(),
            "minimum_volume": obj.getMinimumVolume(),
            "hazardous": obj.getHazardous(),
            "retention_period": obj.getRetentionPeriod(),
            "sample_matrix_uid": sample_matrix_uid,
            "sample_matrix_title": sample_matrix_title,
            "container_type_uid": container_type_uid,
            "container_type_title": container_type_title,
            "sample_point_uids": sample_point_uids,
            "sample_point_titles": sample_point_titles,
        })

        # catalog queries for UI field filtering
        filter_queries = {
            "samplepoint": {
                "getSampleTypeTitles": [obj.Title(), ''],
                "getClientUID": [client_uid, bika_samplepoints_uid],
                "sort_order": "descending",
            },
            "specification": {
                "getSampleTypeTitle": obj.Title(),
                "getClientUID": [client_uid, bika_analysisspecs_uid],
                "sort_order": "descending",
            }
        }
        info["filter_queries"] = filter_queries

        return info

    @cache(cache_key)
    def get_sample_info(self, obj):
        """Returns the info for a Sample
        """
        info = self.get_base_info(obj)

        # sample type
        sample_type = obj.getSampleType()
        sample_type_uid = sample_type and sample_type.UID() or ""
        sample_type_title = sample_type and sample_type.Title() or ""

        # sample condition
        sample_condition = obj.getSampleCondition()
        sample_condition_uid = sample_condition and sample_condition.UID() or ""
        sample_condition_title = sample_condition and sample_condition.Title() or ""

        # storage location
        storage_location = obj.getStorageLocation()
        storage_location_uid = storage_location and storage_location.UID() or ""
        storage_location_title = storage_location and storage_location.Title() or ""

        # sample point
        sample_point = obj.getSamplePoint()
        sample_point_uid = sample_point and sample_point.UID() or ""
        sample_point_title = sample_point and sample_point.Title() or ""

        # container type
        container_type = sample_type and sample_type.getContainerType() or None
        container_type_uid = container_type and container_type.UID() or ""
        container_type_title = container_type and container_type.Title() or ""

        info.update({
            "sample_id": obj.getSampleID(),
            "date_sampled": self.to_iso_date(obj.getDateSampled()),
            "sampling_date": self.to_iso_date(obj.getSamplingDate()),
            "sample_type_uid": sample_type_uid,
            "sample_type_title": sample_type_title,
            "container_type_uid": container_type_uid,
            "container_type_title": container_type_title,
            "sample_condition_uid": sample_condition_uid,
            "sample_condition_title": sample_condition_title,
            "storage_location_uid": storage_location_uid,
            "storage_location_title": storage_location_title,
            "sample_point_uid": sample_point_uid,
            "sample_point_title": sample_point_title,
            "environmental_conditions": obj.getEnvironmentalConditions(),
            "composite": obj.getComposite(),
            "client_sample_id": obj.getClientSampleID(),
            "client_reference": obj.getClientReference(),
            "sampling_workflow_enabled": obj.getSamplingWorkflowEnabled(),
            "adhoc": obj.getAdHoc(),
            "remarks": obj.getRemarks(),
        })
        return info

    @cache(cache_key)
    def get_specification_info(self, obj):
        """Returns the info for a Specification
        """
        info = self.get_base_info(obj)

        results_range = obj.getResultsRange()
        info.update({
            "results_range": results_range,
            "sample_type_uid": obj.getSampleTypeUID(),
            "sample_type_title": obj.getSampleTypeTitle(),
            "client_uid": obj.getClientUID(),
        })

        bsc = api.get_tool("bika_setup_catalog")

        def get_service_by_keyword(keyword):
            if keyword is None:
                return []
            return map(api.get_object, bsc({
                "portal_type": "AnalysisService",
                "getKeyword": keyword
            }))

        # append a mapping of service_uid -> specification
        specifications = {}
        for spec in results_range:
            service_uid = spec.get("uid")
            if service_uid is None:
                # service spec is not attached to a specific service, but to a keyword
                for service in get_service_by_keyword(spec.get("keyword")):
                    service_uid = api.get_uid(service)
                    specifications[service_uid] = spec
                continue
            specifications[service_uid] = spec
        info["specifications"] = specifications
        # spec'd service UIDs
        info["service_uids"] = specifications.keys()
        return info

    @cache(cache_key)
    def get_container_info(self, obj):
        """Returns the info for a Container
        """
        info = self.get_base_info(obj)
        info.update({})
        return info

    def get_service_partitions(self, service, sampletype):
        """Returns the Partition info for a Service and SampleType

        N.B.: This is actually not used as the whole partition, preservation
        and conservation settings are solely handled by AR Templates for all
        selected services.
        """

        partitions = []

        sampletype_uid = api.get_uid(sampletype)
        # partition setup of this service
        partition_setup = filter(lambda p: p.get("sampletype") == sampletype_uid,
                                 service.getPartitionSetup())

        def get_containers(container_uids):
            containers = []
            for container_uid in container_uids:
                container = api.get_object_by_uid(container_uid)
                if container.portal_type == "ContainerType":
                    containers.extend(container.getContainers())
                else:
                    containers.append(container)
            return containers

        for partition in partition_setup:
            containers = get_containers(partition.get("container", []))
            preservations = map(api.get_object_by_uid, partition.get("preservation", []))
            partitions.append({
                "separate": partition.get("separate", False) and True or False,
                "container": map(self.get_container_info, containers),
                "preservations": map(self.get_preservation_info, preservations),
                "minvol": partition.get("vol", ""),
            })
        else:
            containers = [service.getContainer()] or []
            preservations = [service.getPreservation()] or []
            partitions.append({
                "separate": service.getSeparate(),
                "container": map(self.get_container_info, containers),
                "preservations": map(self.get_preservation_info, preservations),
                "minvol": sampletype.getMinimumVolume() or "",
            })

        return partitions

    def ajax_get_service(self):
        """Returns the services information
        """
        uid = self.request.form.get("uid", None)

        if uid is None:
            return self.error("Invalid UID", status=400)

        service = self.get_object_by_uid(uid)
        if not service:
            return self.error("Service not found", status=404)

        info = self.get_service_info(service)
        return info

    def ajax_recalculate_records(self):
        """Recalculate all AR records and dependencies

        - samples
        - templates
        - profiles
        - services
        - dependencies

        XXX: This function has grown too much and needs refactoring!
        """
        out = {}

        # The sorted records from the request
        records = self.get_records()

        for n, record in enumerate(records):

            # Mapping of client UID -> client object info
            client_metadata = {}
            # Mapping of contact UID -> contact object info
            contact_metadata = {}
            # Mapping of sample UID -> sample object info
            sample_metadata = {}
            # Mapping of sampletype UID -> sampletype object info
            sampletype_metadata = {}
            # Mapping of drymatter UID -> drymatter service info
            dms_metadata = {}
            # Mapping of drymatter service (dms) -> list of dependent services
            dms_to_services = {}
            # Mapping of dependent services -> drymatter service (dms)
            service_to_dms = {}
            # Mapping of specification UID -> specification object info
            specification_metadata = {}
            # Mapping of specification UID -> list of service UIDs
            specification_to_services = {}
            # Mapping of service UID -> list of specification UIDs
            service_to_specifications = {}
            # Mapping of template UID -> template object info
            template_metadata = {}
            # Mapping of template UID -> list of service UIDs
            template_to_services = {}
            # Mapping of service UID -> list of template UIDs
            service_to_templates = {}
            # Mapping of profile UID -> list of service UIDs
            profile_to_services = {}
            # Mapping of service UID -> list of profile UIDs
            service_to_profiles = {}
            # Profile metadata for UI purposes
            profile_metadata = {}
            # Mapping of service UID -> service object info
            service_metadata = {}
            # mapping of service UID -> unmet service dependency UIDs
            unmet_dependencies = {}

            # Internal mappings of UID -> object of selected items in this record
            _clients = self.get_objs_from_record(record, "Client_uid")
            _contacts = self.get_objs_from_record(record, "Contact_uid")
            _specifications = self.get_objs_from_record(record, "Specification_uid")
            _templates = self.get_objs_from_record(record, "Template_uid")
            _samples = self.get_objs_from_record(record, "Sample_uid")
            _profiles = self.get_objs_from_record(record, "Profiles_uid")
            _services = self.get_objs_from_record(record, "Analyses")
            _sampletypes = self.get_objs_from_record(record, "SampleType_uid")

            # CLIENTS
            for uid, obj in _clients.iteritems():
                # get the client metadata
                metadata = self.get_client_info(obj)
                # remember the client metadata
                client_metadata[uid] = metadata

            # CONTACTS
            for uid, obj in _contacts.iteritems():
                # get the contact metadata
                metadata = self.get_contact_info(obj)
                # remember the contact metadata
                contact_metadata[uid] = metadata

            # SPECIFICATIONS
            for uid, obj in _specifications.iteritems():
                # get the specification metadata
                metadata = self.get_specification_info(obj)
                # remember the metadata of this specification
                specification_metadata[uid] = metadata
                # get the spec'd service UIDs
                service_uids = metadata["service_uids"]
                # remember a mapping of specification uid -> spec'd services
                specification_to_services[uid] = service_uids
                # remember a mapping of service uid -> specifications
                for service_uid in service_uids:
                    if service_uid in service_to_specifications:
                        service_to_specifications[service_uid].append(uid)
                    else:
                        service_to_specifications[service_uid] = [uid]

            # AR TEMPLATES
            for uid, obj in _templates.iteritems():
                # get the template metadata
                metadata = self.get_template_info(obj)
                # remember the template metadata
                template_metadata[uid] = metadata

                # XXX notify below to include the drymatter service as well
                record["ReportDryMatter"] = obj.getReportDryMatter()

                # profile from the template
                profile = obj.getAnalysisProfile()
                # add the profile to the other profiles
                if profile is not None:
                    profile_uid = api.get_uid(profile)
                    _profiles[profile_uid] = profile

                # get the template analyses
                # [{'partition': 'part-1', 'service_uid': 'a6c5ff56a00e427a884e313d7344f966'},
                #  {'partition': 'part-1', 'service_uid': 'dd6b0f756a5b4b17b86f72188ee81c80'}]
                analyses = obj.getAnalyses() or []
                # get all UIDs of the template records
                service_uids = map(lambda rec: rec.get("service_uid"), analyses)
                # remember a mapping of template uid -> service
                template_to_services[uid] = service_uids
                # remember a mapping of service uid -> templates
                for service_uid in service_uids:
                    # append service to services mapping
                    service = self.get_object_by_uid(service_uid)
                    # remember the template of all services
                    if service_uid in service_to_templates:
                        service_to_templates[service_uid].append(uid)
                    else:
                        service_to_templates[service_uid] = [uid]

            # DRY MATTER
            dms = self.get_drymatter_service()
            if dms and record.get("ReportDryMatter"):
                # get the UID of the drymatter service
                dms_uid = api.get_uid(dms)
                # get the drymatter metadata
                metadata = self.get_service_info(dms)
                # remember the metadata of the drymatter service
                dms_metadata[dms_uid] = metadata
                # add the drymatter service to the service collection (processed later)
                _services[dms_uid] = dms
                # get the dependencies of the drymatter service
                dms_deps = self.get_calculation_dependencies_for(dms)
                # add the drymatter service dependencies to the service collection (processed later)
                _services.update(dms_deps)
                # remember a mapping of dms uid -> services
                dms_to_services[dms_uid] = dms_deps.keys() + [dms_uid]
                # remember a mapping of dms dependency uid -> dms
                service_to_dms[dms_uid] = [dms_uid]
                for dep_uid, dep in dms_deps.iteritems():
                    if dep_uid in service_to_dms:
                        service_to_dms[dep_uid].append(dms_uid)
                    else:
                        service_to_dms[dep_uid] = [dms_uid]

            # PROFILES
            for uid, obj in _profiles.iteritems():
                # get the profile metadata
                metadata = self.get_profile_info(obj)
                # remember the profile metadata
                profile_metadata[uid] = metadata
                # get all services of this profile
                services = obj.getService()
                # get all UIDs of the profile services
                service_uids = map(api.get_uid, services)
                # remember all services of this profile
                profile_to_services[uid] = service_uids
                # remember a mapping of service uid -> profiles
                for service in services:
                    # get the UID of this service
                    service_uid = api.get_uid(service)
                    # add the service to the other services
                    _services[service_uid] = service
                    # remember the profiles of this service
                    if service_uid in service_to_profiles:
                        service_to_profiles[service_uid].append(uid)
                    else:
                        service_to_profiles[service_uid] = [uid]

            # SAMPLES
            for uid, obj in _samples.iteritems():
                # get the sample metadata
                metadata = self.get_sample_info(obj)
                # remember the sample metadata
                sample_metadata[uid] = metadata

            # SAMPLETYPES
            for uid, obj in _sampletypes.iteritems():
                # get the sampletype metadata
                metadata = self.get_sampletype_info(obj)
                # remember the sampletype metadata
                sampletype_metadata[uid] = metadata

            # SERVICES
            for uid, obj in _services.iteritems():
                # get the service metadata
                metadata = self.get_service_info(obj)

                # N.B.: Partitions only handled via AR Template.
                #
                # # Partition setup for the given sample type
                # for st_uid, st_obj in _sampletypes.iteritems():
                #     # remember the partition setup for this service
                #     metadata["partitions"] = self.get_service_partitions(obj, st_obj)

                # remember the services' metadata
                service_metadata[uid] = metadata

            # DEPENDENCIES
            for uid, obj in _services.iteritems():
                # get the dependencies of this service
                deps = self.get_service_dependencies_for(obj)

                # check for unmet dependencies
                for dep in deps["dependencies"]:
                    # we use the UID to test for equality
                    dep_uid = api.get_uid(dep)
                    if dep_uid not in _services.keys():
                        if uid in unmet_dependencies:
                            unmet_dependencies[uid].append(self.get_base_info(dep))
                        else:
                            unmet_dependencies[uid] = [self.get_base_info(dep)]
                # remember the dependencies in the service metadata
                service_metadata[uid].update({
                    "dependencies": map(self.get_base_info, deps["dependencies"]),
                    "dependants": map(self.get_base_info, deps["dependants"]),
                })

            # Each key `n` (1,2,3...) contains the form data for one AR Add
            # column in the UI.
            # All relevant form data will be set according to this data.
            out[n] = {
                "client_metadata": client_metadata,
                "contact_metadata": contact_metadata,
                "sample_metadata": sample_metadata,
                "sampletype_metadata": sampletype_metadata,
                "dms_metadata": dms_metadata,
                "dms_to_services": dms_to_services,
                "service_to_dms": service_to_dms,
                "specification_metadata": specification_metadata,
                "specification_to_services": specification_to_services,
                "service_to_specifications": service_to_specifications,
                "template_metadata": template_metadata,
                "template_to_services": template_to_services,
                "service_to_templates": service_to_templates,
                "profile_metadata": profile_metadata,
                "profile_to_services": profile_to_services,
                "service_to_profiles": service_to_profiles,
                "service_metadata": service_metadata,
                "unmet_dependencies": unmet_dependencies,
            }

        return out

    def show_recalculate_prices(self):
        bika_setup = api.get_bika_setup()
        return bika_setup.getShowPrices()

    def ajax_recalculate_prices(self):
        """Recalculate prices for all ARs
        """
        # When the option "Include and display pricing information" in
        # Bika Setup Accounting tab is not selected
        if not self.show_recalculate_prices():
            return {}

        # The sorted records from the request
        records = self.get_records()

        client = self.get_client()
        bika_setup = api.get_bika_setup()

        member_discount = float(bika_setup.getMemberDiscount())
        member_discount_applies = False
        if client:
            member_discount_applies = client.getMemberDiscountApplies()

        prices = {}
        for n, record in enumerate(records):
            ardiscount_amount = 0.00
            arservices_price = 0.00
            arprofiles_price = 0.00
            arprofiles_vat_amount = 0.00
            arservice_vat_amount = 0.00
            services_from_priced_profile = []

            profile_uids = record.get("Profiles_uid", "").split(",")
            profile_uids = filter(lambda x: x, profile_uids)
            profiles = map(self.get_object_by_uid, profile_uids)
            services = map(self.get_object_by_uid, record.get("Analyses", []))

            # ANALYSIS PROFILES PRICE
            for profile in profiles:
                use_profile_price = profile.getUseAnalysisProfilePrice()
                if not use_profile_price:
                    continue

                profile_price = float(profile.getAnalysisProfilePrice())
                profile_vat = float(profile.getAnalysisProfileVAT())
                arprofiles_price += profile_price
                arprofiles_vat_amount += profile_vat
                profile_services = profile.getService()
                services_from_priced_profile.extend(profile_services)

            # ANALYSIS SERVICES PRICE
            for service in services:
                if service in services_from_priced_profile:
                    continue
                service_price = float(service.getPrice())
                # service_vat = float(service.getVAT())
                service_vat_amount = float(service.getVATAmount())
                arservice_vat_amount += service_vat_amount
                arservices_price += service_price

            base_price = arservices_price + arprofiles_price

            # Calculate the member discount if it applies
            if member_discount and member_discount_applies:
                logger.info("Member discount applies with {}%".format(member_discount))
                ardiscount_amount = base_price * member_discount / 100

            subtotal = base_price - ardiscount_amount
            vat_amount = arprofiles_vat_amount + arservice_vat_amount
            total = subtotal + vat_amount

            prices[n] = {
                "discount": "{0:.2f}".format(ardiscount_amount),
                "subtotal": "{0:.2f}".format(subtotal),
                "vat": "{0:.2f}".format(vat_amount),
                "total": "{0:.2f}".format(total),
            }
            logger.info("Prices for AR {}: Discount={discount} "
                        "VAT={vat} Subtotal={subtotal} total={total}"
                        .format(n, **prices[n]))

        return prices

    def ajax_submit(self):
        """Submit & create the ARs
        """

        # Get AR required fields (including extended fields)
        fields = self.get_obj_fields()

        # extract records from request
        records = self.get_records()

        fielderrors = {}
        errors = {"message": "", "fielderrors": {}}

        attachments = {}
        valid_records = []

        # Validate required fields
        for n, record in enumerate(records):

            # Process UID fields first and set their values to the linked field
            uid_fields = filter(lambda f: f.endswith("_uid"), record)
            for field in uid_fields:
                name = field.replace("_uid", "")
                value = record.get(field)
                if "," in value:
                    value = value.split(",")
                record[name] = value

            # Extract file uploads (fields ending with _file)
            # These files will be added later as attachments
            file_fields = filter(lambda f: f.endswith("_file"), record)
            attachments[n] = map(lambda f: record.pop(f), file_fields)

            # Process Specifications field (dictionary like records instance).
            # -> Convert to a standard Python dictionary.
            specifications = map(lambda x: dict(x), record.pop("Specifications", []))
            record["Specifications"] = specifications

            # Required fields and their values
            required_keys = [field.getName() for field in fields if field.required]
            required_values = [record.get(key) for key in required_keys]
            required_fields = dict(zip(required_keys, required_values))

            # Client field is required but hidden in the AR Add form. We remove
            # it therefore from the list of required fields to let empty
            # columns pass the required check below.
            if record.get("Client", False):
                required_fields.pop('Client', None)

            # Contacts get pre-filled out if only one contact exists.
            # We won't force those columns with only the Contact filled out to be required.
            contact = required_fields.pop("Contact", None)

            # None of the required fields are filled, skip this record
            if not any(required_fields.values()):
                continue

            # Re-add the Contact
            required_fields["Contact"] = contact

            # Missing required fields
            missing = [f for f in required_fields if not record.get(f, None)]

            # If there are required fields missing, flag an error
            for field in missing:
                fieldname = "{}-{}".format(field, n)
                msg = _("Field '{}' is required".format(field))
                fielderrors[fieldname] = msg

            # Selected Analysis UIDs
            selected_analysis_uids = record.get("Analyses", [])

            # Partitions defined in Template
            template_parts = {}
            template_uid = record.get("Template_uid")
            if template_uid:
                template = api.get_object_by_uid(template_uid)
                for part in template.getPartitions():
                    # remember the part setup by part_id
                    template_parts[part.get("part_id")] = part

            # The final data structure should look like this:
            # [{"part_id": "...", "container_uid": "...", "services": []}]
            partitions = {}
            parts = record.pop("Parts", [])
            for part in parts:
                part_id = part.get("part")
                service_uid = part.get("uid")
                # skip unselected Services
                if service_uid not in selected_analysis_uids:
                    continue
                # Container UID for this part
                container_uids = []
                template_part = template_parts.get(part_id)
                if template_part:
                    container_uid = template_part.get("container_uid")
                    if container_uid:
                        container_uids.append(container_uid)

                # remember the part id and the services
                if part_id not in partitions:
                    partitions[part_id] = {
                        "part_id": part_id,
                        "container_uid": container_uids,
                        "services": [service_uid],
                    }
                else:
                    partitions[part_id]["services"].append(service_uid)

            # Inject the Partitions to the record (will be picked up during the AR creation)
            record["Partitions"] = partitions.values()

            # Process valid record
            valid_record = dict()
            for fieldname, fieldvalue in record.iteritems():
                # clean empty
                if fieldvalue in ['', None]:
                    continue
                valid_record[fieldname] = fieldvalue

            # append the valid record to the list of valid records
            valid_records.append(valid_record)

        # return immediately with an error response if some field checks failed
        if fielderrors:
            errors["fielderrors"] = fielderrors
            return {'errors': errors}

        # Process Form
        ARs = []
        for n, record in enumerate(valid_records):
            client_uid = record.get("Client")
            client = self.get_object_by_uid(client_uid)

            if not client:
                raise RuntimeError("No client found")

            # get the specifications and pass them directly to the AR create function.
            specifications = record.pop("Specifications", {})

            # Create the Analysis Request
            try:
                ar = crar(client, self.request, record, specifications=specifications)
            except (KeyError, RuntimeError) as e:
                errors["message"] = e.message
                return {"errors": errors}
            ARs.append(ar.Title())

            _attachments = []
            for attachment in attachments.get(n, []):
                if not attachment.filename:
                    continue
                att = _createObjectByType("Attachment", self.context, tmpID())
                att.setAttachmentFile(attachment)
                att.processForm()
                _attachments.append(att)
            if _attachments:
                ar.setAttachment(_attachments)

        level = "info"
        if len(ARs) == 0:
            message = _('No Analysis Requests could be created.')
            level = "error"
        elif len(ARs) > 1:
            message = _('Analysis requests ${ARs} were successfully created.',
                        mapping={'ARs': safe_unicode(', '.join(ARs))})
        else:
            message = _('Analysis request ${AR} was successfully created.',
                        mapping={'AR': safe_unicode(ARs[0])})

        # Display a portal message
        self.context.plone_utils.addPortalMessage(message, level)

        # Automatic label printing won't print "register" labels for Secondary ARs
        bika_setup = api.get_bika_setup()
        auto_print = bika_setup.getAutoPrintStickers()

        # https://github.com/bikalabs/bika.lims/pull/2153
        new_ars = [a for a in ARs if a[-1] == '1']

        if 'register' in auto_print and new_ars:
            return {
                'success': message,
                'stickers': new_ars,
                'stickertemplate': self.context.bika_setup.getAutoStickerTemplate()
            }
        else:
            return {'success': message}