# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by its authors.
# Some rights reserved, see README and LICENSE.

import itertools
import os
import tempfile
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr

from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType
from Products.CMFPlone.utils import safe_unicode
from zope.interface import alsoProvides
from zope.lifecycleevent import modified

from bika.lims import api
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.idserver import renameAfterCreation
from bika.lims.interfaces import IAnalysisRequest
from bika.lims.interfaces import IAnalysisRequestRetest
from bika.lims.interfaces import IAnalysisRequestSecondary
from bika.lims.interfaces import IAnalysisService
from bika.lims.interfaces import IReceived
from bika.lims.interfaces import IRoutineAnalysis
from bika.lims.utils import attachPdf
from bika.lims.utils import changeWorkflowState
from bika.lims.utils import copy_field_values
from bika.lims.utils import createPdf
from bika.lims.utils import encode_header
from bika.lims.utils import tmpID
from bika.lims.utils import to_utf8
from bika.lims.workflow import ActionHandlerPool
from bika.lims.workflow import doActionFor
from bika.lims.workflow import push_reindex_to_actions_pool
from bika.lims.workflow.analysisrequest import AR_WORKFLOW_ID
from bika.lims.workflow.analysisrequest import do_action_to_analyses


def create_analysisrequest(client, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)
    :param client:
        The container (Client) in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list. If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required. If not
        specified, the AR's sample is created with a single partition.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses. If not set, prices
        are read from the associated analysis service.
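
    Example (illustrative sketch only; ``client``, ``request``, the UIDs and
    the service keywords are placeholders, and the ``values`` keys are common
    AR|Sample schema field names that may differ per setup)::

        values = {
            "Contact": contact_uid,
            "SampleType": sampletype_uid,
            "DateSampled": date_sampled,
        }
        sample = create_analysisrequest(client, request, values,
                                        analyses=["Ca", "Mg"])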
    """
    # Don't pollute the dict param passed in
    values = dict(values.items())

    # Create the Analysis Request
    ar = _createObjectByType('AnalysisRequest', client, tmpID())

    # Resolve the services uids and set the analyses for this Analysis Request
    service_uids = get_services_uids(context=client, values=values,
                                     analyses_serv=analyses)
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)
    values.update({"Analyses": service_uids})
    ar.processForm(REQUEST=request, values=values)

    # Handle hidden analyses from template and profiles
    # https://github.com/senaite/senaite.core/issues/1437
    # https://github.com/senaite/senaite.core/issues/1326
    apply_hidden_services(ar)

    # Handle rejection reasons
    rejection_reasons = resolve_rejection_reasons(values)
    ar.setRejectionReasons(rejection_reasons)

    # Handle secondary Analysis Request
    primary = ar.getPrimaryAnalysisRequest()
    if primary:
        # Mark the secondary with the `IAnalysisRequestSecondary` interface
        alsoProvides(ar, IAnalysisRequestSecondary)

        # Rename the secondary according to the ID server setup
        renameAfterCreation(ar)

        # Set dates to match with those from the primary
        ar.setDateSampled(primary.getDateSampled())
        ar.setSamplingDate(primary.getSamplingDate())
        ar.setDateReceived(primary.getDateReceived())

        # Force the transition of the secondary to received and set the
        # description/comment in the transition accordingly.
        if primary.getDateReceived():
            primary_id = primary.getId()
            comment = "Auto-received. Secondary Sample of {}".format(primary_id)
            changeWorkflowState(ar, AR_WORKFLOW_ID, "sample_received",
                                action="receive", comments=comment)

            # Mark the secondary as received
            alsoProvides(ar, IReceived)

            # Initialize analyses
            do_action_to_analyses(ar, "initialize")

        # Notify the ar has been modified
        modified(ar)

        # Reindex the AR
        ar.reindexObject()

        # If rejection reasons have been set, reject automatically
        if rejection_reasons:
            doActionFor(ar, "reject")

        # In "received" state already
        return ar

    # Try first with no sampling transition, because it is the most common
    # configuration
    success, message = doActionFor(ar, "no_sampling_workflow")
    if not success:
        doActionFor(ar, "to_be_sampled")

    # If rejection reasons have been set, reject the sample automatically
    if rejection_reasons:
        doActionFor(ar, "reject")

    return ar


def apply_hidden_services(sample):
    """
    Applies the hidden setting to the sample analyses in accordance with the
    settings from its template and/or profiles
    :param sample: the sample that contains the analyses
    """
    hidden = list()

    # Get the "hidden" service uids from the template
    template = sample.getTemplate()
    hidden = get_hidden_service_uids(template)

    # Get the "hidden" service uids from profiles
    profiles = sample.getProfiles()
    hid_profiles = map(get_hidden_service_uids, profiles)
    hid_profiles = list(itertools.chain(*hid_profiles))
    hidden.extend(hid_profiles)

    # Update the sample analyses
    analyses = sample.getAnalyses(full_objects=True)
    analyses = filter(lambda an: an.getServiceUID() in hidden, analyses)
    for analysis in analyses:
        analysis.setHidden(True)


def get_hidden_service_uids(profile_or_template):
    """Returns a list of service uids that are set as hidden
    :param profile_or_template: ARTemplate or AnalysisProfile object
    """
    if not profile_or_template:
        return []
    settings = profile_or_template.getAnalysisServicesSettings()
    hidden = filter(lambda ser: ser.get("hidden", False), settings)
    return map(lambda setting: setting["uid"], hidden)


def get_services_uids(context=None, analyses_serv=None, values=None):
    """
    Returns a list of analysis services UIDs resolved from the given
    parameters.
    :param analyses_serv: A list (or one object) of service-related info items.
        See the _resolve_items_to_service_uids() docstring.
    :type analyses_serv: list
    :param values: a dict, where keys are AR|Sample schema field names.
    :type values: dict
    :returns: a list of analyses services UIDs
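
    Example (illustrative sketch; ``client`` and ``profile_uid`` are
    placeholders)::

        service_uids = get_services_uids(
            context=client,
            analyses_serv=["Ca", "Mg"],  # keywords, titles, UIDs or objects
            values={"Profiles": profile_uid})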
    """
    if not analyses_serv:
        analyses_serv = []
    if not values:
        values = {}

    if not context or (not analyses_serv and not values):
        raise RuntimeError(
            "get_services_uids: Missing or wrong parameters.")

    # Merge analyses from analyses_serv and values into one list
    analyses_services = analyses_serv + (values.get("Analyses", None) or [])

    # Analysis Requests can also be created through JSON requests, where
    # services, profiles or types are not always sent. Sometimes we get
    # analyses and profiles that do not match, so we have to act accordingly.
    # Get the analyses profiles
    analyses_profiles = values.get('Profiles', [])
    if not isinstance(analyses_profiles, (list, tuple)):
        # Plone converts the incoming form value to a list, if there are
        # multiple values; but if not, it will send a string (a single UID).
        analyses_profiles = [analyses_profiles]

    if not analyses_services and not analyses_profiles:
        return []

    # Add analysis services UIDs from profiles to analyses_services variable.
    if analyses_profiles:
        uid_catalog = getToolByName(context, 'uid_catalog')
        for brain in uid_catalog(UID=analyses_profiles):
            profile = api.get_object(brain)
            # Only services UIDs
            services_uids = profile.getRawService()
            # _resolve_items_to_service_uids() will remove duplicates
            analyses_services += services_uids

    return _resolve_items_to_service_uids(analyses_services)


def _resolve_items_to_service_uids(items):
    """ Returns a list of service uids without duplicates based on the items
    :param items:
        A list (or one object) of service-related info items. The list can be
        heterogeneous and each item can be:
        - Analysis Service instance
        - Analysis instance
        - Analysis Service title
        - Analysis Service UID
        - Analysis Service Keyword
        If an item that doesn't match any of the criteria above is found, the
        function will raise a RuntimeError
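
    Example (illustrative sketch; the keyword, object and UID shown are
    placeholders)::

        items = ["Ca", service_object, service_uid]
        uids = _resolve_items_to_service_uids(items)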
    """
    def resolve_to_uid(item):
        if api.is_uid(item):
            return item
        elif IAnalysisService.providedBy(item):
            return item.UID()
        elif IRoutineAnalysis.providedBy(item):
            return item.getServiceUID()

        bsc = api.get_tool("bika_setup_catalog")
        brains = bsc(portal_type='AnalysisService', getKeyword=item)
        if brains:
            return brains[0].UID
        brains = bsc(portal_type='AnalysisService', title=item)
        if brains:
            return brains[0].UID
        raise RuntimeError(
            str(item) + " should be the UID, keyword or title of an "
                        "AnalysisService.")

    # Maybe only a single item was passed
    if type(items) not in (list, tuple):
        items = [items]
    service_uids = map(resolve_to_uid, list(set(items)))
    return list(set(service_uids))


def notify_rejection(analysisrequest):
    """
    Notifies via email that a given Analysis Request has been rejected. The
    notification is sent to the Client contacts assigned to the Analysis
    Request.

    :param analysisrequest: Analysis Request to which the notification refers
    :returns: True on success, False if no recipient email address was found
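
    Example (illustrative sketch; ``sample`` is a rejected Analysis Request)::

        sent = notify_rejection(sample)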
    """

    # We do these imports here to avoid circular dependencies until we deal
    # better with this notify_rejection thing.
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectPdfView, AnalysisRequestRejectEmailView

    arid = analysisrequest.getId()

    # This is the template to render for the pdf that will be attached to the
    # email and to the Analysis Request for further access
    tpl = AnalysisRequestRejectPdfView(analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-rejected' % arid
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if pdf:
        # Attach the pdf to the Analysis Request
        attid = analysisrequest.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType(
            "Attachment", analysisrequest.aq_parent, attid)
        att.setAttachmentFile(open(pdf_fn))
        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        analysisrequest.addAttachment(att)
        os.remove(pdf_fn)

    # This is the message for the email's body
    tpl = AnalysisRequestRejectEmailView(
        analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')

    # Compose and send the email
    mailto = []
    lab = analysisrequest.bika_setup.laboratory
    mailfrom = formataddr((encode_header(lab.getName()), lab.getEmailAddress()))
    mailsubject = _('%s has been rejected') % arid
    contacts = [analysisrequest.getContact()] + analysisrequest.getCCContact()
    for contact in contacts:
        name = to_utf8(contact.getFullname())
        email = to_utf8(contact.getEmailAddress())
        if email:
            mailto.append(formataddr((encode_header(name), email)))
    if not mailto:
        return False
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = mailsubject
    mime_msg['From'] = mailfrom
    mime_msg['To'] = ','.join(mailto)
    mime_msg.preamble = 'This is a multi-part MIME message.'
    msg_txt = MIMEText(html, _subtype='html')
    mime_msg.attach(msg_txt)
    if pdf:
        attachPdf(mime_msg, pdf, filename)

    try:
        host = getToolByName(analysisrequest, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception:
        logger.warning(
            "Email with subject %s was not sent (SMTP connection error)"
            % mailsubject)

    return True


def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
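
    Example (illustrative sketch; ``sample`` is an invalidated Analysis
    Request)::

        retest = create_retest(sample)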
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state
        raise ValueError("Cannot create a retest from a non-invalidated "
                         "Analysis Request: {}".format(repr(ar)))

    # Open the actions pool
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)

    # Mark the retest with the `IAnalysisRequestRetest` interface
    alsoProvides(retest, IAnalysisRequestRetest)

    # Assign the source to retest
    retest.setInvalidated(ar)

    # Rename the retest according to the ID server setup
    renameAfterCreation(retest)

    # Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if api.get_workflow_status_of(an) in intermediate_states:
            # Exclude intermediate analyses
            continue

        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')
    alsoProvides(retest, IReceived)

    # Initialize analyses
    for analysis in retest.getAnalyses(full_objects=True):
        if not IRoutineAnalysis.providedBy(analysis):
            continue
        changeWorkflowState(analysis, "bika_analysis_workflow", "unassigned")

    # Reindex and other stuff
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # Resume the actions pool
    actions_pool.resume()
    return retest


def create_partition(analysis_request, request, analyses, sample_type=None,
                     container=None, preservation=None, skip_fields=None,
                     remove_primary_analyses=True, internal_use=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :param remove_primary_analyses: removes the analyses from the parent
    :param internal_use: whether the partition is flagged for internal use
    :return: the new partition
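
    Example (illustrative sketch; ``primary``, ``request`` and ``analyses``
    are placeholders for the primary sample, the current request and a subset
    of its analysis objects)::

        partition = create_partition(primary, request, analyses,
                                     internal_use=False)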
    """
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "DetachedFrom",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": internal_use,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(preservation) or ""

    # Create the Partition
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)
    specs = ar.getSpecification()
    specs = specs and specs.getResultsRange() or []
    partition = create_analysisrequest(client, request=request, values=record,
                                       analyses=services, specifications=specs)

    # Remove analyses from the primary
    if remove_primary_analyses:
        analyses_ids = map(api.get_id, analyses)
        ar.manage_delObjects(analyses_ids)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    # Manually set the Date Received to match with its parent. This is
    # necessary because create_analysisrequest calls processForm, so
    # DateReceived is not set because the partition has not been received yet
    partition.setDateReceived(ar.getDateReceived())
    partition.reindexObject(idxs=["getDateReceived"])

    # Force partition to same status as the primary
    status = api.get_workflow_status_of(ar)
    changeWorkflowState(partition, "bika_ar_workflow", status)
    if IReceived.providedBy(ar):
        alsoProvides(partition, IReceived)

    # And initialize the analyses the partition contains. This is required
    # here because the "initialize" transition of analyses relies on a guard,
    # so the initialization can only be performed when the sample has been
    # received (DateReceived is set)
    ActionHandlerPool.get_instance().queue_pool()
    for analysis in partition.getAnalyses(full_objects=True):
        doActionFor(analysis, "initialize")
    ActionHandlerPool.get_instance().resume()
    return partition


def fields_to_dict(obj, skip_fields=None):
    """
    Generates a dictionary with the field values of the object passed in, where
    keys are the field names. Skips computed fields
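
    Example (illustrative sketch; ``sample`` is any content object with
    schema fields)::

        record = fields_to_dict(sample, skip_fields=["Analyses"])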
    """
    data = {}
    obj = api.get_object(obj)
    for field_name, field in api.get_fields(obj).items():
        if skip_fields and field_name in skip_fields:
            continue
        if field.type == "computed":
            continue
        data[field_name] = field.get(obj)
    return data


def resolve_rejection_reasons(values):
    """Resolves the rejection reasons from the submitted values to the format
    supported by Sample's Rejection Reason field
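
    Example (illustrative sketch of the input/output shapes handled below;
    the reason texts are placeholders)::

        values = {
            "RejectionReasons": [{"checkbox": "on",
                                  "multiselection": ["Not enough volume"]}],
            "RejectionReasons.textfield": [{"other": "Container broken"}],
        }
        # resolve_rejection_reasons(values) ->
        #   [{"selected": ["Not enough volume"], "other": "Container broken"}]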
    """
    rejection_reasons = values.get("RejectionReasons")
    if not rejection_reasons:
        return []

    # Predefined reasons selected?
    selected = rejection_reasons[0] or {}
    if selected.get("checkbox") == "on":
        selected = selected.get("multiselection") or []
    else:
        selected = []

    # Other reasons set?
    other = values.get("RejectionReasons.textfield")
    if other:
        other = other[0] or {}
        other = other.get("other", "")
    else:
        other = ""

    # If neither selected nor other reasons are set, return empty
    if any([selected, other]):
        return [{"selected": selected, "other": other}]

    return []