# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by its authors.
# Some rights reserved, see README and LICENSE.
21
|
|
|
import os |
22
|
|
|
import tempfile |
23
|
|
|
from email.mime.multipart import MIMEMultipart |
24
|
|
|
from email.mime.text import MIMEText |
25
|
|
|
|
26
|
|
|
from Products.CMFCore.utils import getToolByName |
27
|
|
|
from Products.CMFPlone.utils import _createObjectByType |
28
|
|
|
from Products.CMFPlone.utils import safe_unicode |
29
|
|
|
from bika.lims import api |
30
|
|
|
from bika.lims import bikaMessageFactory as _ |
31
|
|
|
from bika.lims import logger |
32
|
|
|
from bika.lims.idserver import renameAfterCreation |
33
|
|
|
from bika.lims.interfaces import IAnalysisRequest, IReceived |
34
|
|
|
from bika.lims.interfaces import IAnalysisRequestRetest |
35
|
|
|
from bika.lims.interfaces import IAnalysisRequestSecondary |
36
|
|
|
from bika.lims.interfaces import IAnalysisService |
37
|
|
|
from bika.lims.interfaces import IRoutineAnalysis |
38
|
|
|
from bika.lims.utils import attachPdf |
39
|
|
|
from bika.lims.utils import changeWorkflowState |
40
|
|
|
from bika.lims.utils import copy_field_values |
41
|
|
|
from bika.lims.utils import createPdf |
42
|
|
|
from bika.lims.utils import encode_header |
43
|
|
|
from bika.lims.utils import tmpID |
44
|
|
|
from bika.lims.utils import to_utf8 |
45
|
|
|
from bika.lims.workflow import ActionHandlerPool |
46
|
|
|
from bika.lims.workflow import doActionFor |
47
|
|
|
from bika.lims.workflow import push_reindex_to_actions_pool |
48
|
|
|
from bika.lims.workflow.analysisrequest import AR_WORKFLOW_ID |
49
|
|
|
from bika.lims.workflow.analysisrequest import do_action_to_analyses |
50
|
|
|
from email.Utils import formataddr |
51
|
|
|
from zope.interface import alsoProvides |
52
|
|
|
|
53
|
|
|
|
54
|
|
|
def create_analysisrequest(client, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxilliary objects
    (Sample, SamplePartition, Analysis...)
    :param client:
        The container (Client) in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list. If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required. If not
        specified, AR's sample is created with a single partition.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses. If not set, prices
        are read from the associated analysis service.
    """
    # Don't pollute the dict param passed in
    values = dict(values.items())

    # Create the Analysis Request and fire the Plone/Archetypes creation
    # machinery (processForm also applies the values to the schema fields)
    ar = _createObjectByType('AnalysisRequest', client, tmpID())
    ar.processForm(REQUEST=request, values=values)

    # Resolve the services uids and set the analyses for this Analysis Request
    service_uids = get_services_uids(context=client, values=values,
                                     analyses_serv=analyses)
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)

    # Handle rejection reasons (normalized to the format expected by the
    # Sample's RejectionReasons field)
    rejection_reasons = resolve_rejection_reasons(values)
    ar.setRejectionReasons(rejection_reasons)

    # Handle secondary Analysis Request (an AR created from a primary one,
    # sharing the same physical sample)
    primary = ar.getPrimaryAnalysisRequest()
    if primary:
        # Mark the secondary with the `IAnalysisRequestSecondary` interface
        alsoProvides(ar, IAnalysisRequestSecondary)

        # Rename the secondary according to the ID server setup
        renameAfterCreation(ar)

        # Set dates to match with those from the primary
        ar.setDateSampled(primary.getDateSampled())
        ar.setSamplingDate(primary.getSamplingDate())
        ar.setDateReceived(primary.getDateReceived())

        # Force the transition of the secondary to received and set the
        # description/comment in the transition accordingly.
        if primary.getDateReceived():
            primary_id = primary.getId()
            comment = "Auto-received. Secondary Sample of {}".format(primary_id)
            changeWorkflowState(ar, AR_WORKFLOW_ID, "sample_received",
                                action="receive", comments=comment)

            # Mark the secondary as received
            alsoProvides(ar, IReceived)

            # Initialize analyses
            do_action_to_analyses(ar, "initialize")

            # Reindex the AR
            ar.reindexObject()

            # If rejection reasons have been set, reject automatically
            if rejection_reasons:
                doActionFor(ar, "reject")

            # In "received" state already
            return ar

    # Try first with no sampling transition, cause it is the most common config
    # NOTE(review): `message` is unused; kept because doActionFor returns a
    # (success, message) tuple
    success, message = doActionFor(ar, "no_sampling_workflow")
    if not success:
        doActionFor(ar, "to_be_sampled")

    # If rejection reasons have been set, reject the sample automatically
    if rejection_reasons:
        doActionFor(ar, "reject")

    return ar
def get_services_uids(context=None, analyses_serv=None, values=None):
    """Returns the list of analyses services UIDs resolved from the
    parameters passed in.

    :param context: container used to resolve the uid catalog tool
    :param analyses_serv: a list (or one object) of service-related info
        items. See _resolve_items_to_service_uids() docstring.
    :type analyses_serv: list
    :param values: a dict, where keys are AR|Sample schema field names.
    :type values: dict
    :returns: a list of analyses services UIDs
    """
    analyses_serv = analyses_serv or []
    values = values or {}

    if not context or (not analyses_serv and not values):
        raise RuntimeError(
            "get_services_uids: Missing or wrong parameters.")

    # Merge the services passed in directly with those from values dict
    services = analyses_serv + (values.get("Analyses", None) or [])

    # Analysis requests can be created via JSON petitions, where services,
    # profiles or types are not always sent; analyses and profiles might
    # even not match, so act in consequence.
    profiles = values.get('Profiles', [])
    if not isinstance(profiles, (list, tuple)):
        # Plone converts the incoming form value to a list when multiple
        # values are sent, but delivers a bare string (single UID) otherwise
        profiles = [profiles]

    if not services and not profiles:
        return []

    # Extend services with the analysis services UIDs from each profile
    if profiles:
        uid_catalog = getToolByName(context, 'uid_catalog')
        for brain in uid_catalog(UID=profiles):
            profile = api.get_object(brain)
            # Raw service UIDs only; duplicates are removed afterwards by
            # _resolve_items_to_service_uids()
            services += profile.getRawService()

    return _resolve_items_to_service_uids(services)
def _resolve_items_to_service_uids(items):
    """ Returns a list of service uids without duplicates based on the items
    :param items:
        A list (or one object) of service-related info items. The list can be
        heterogeneous and each item can be:
        - Analysis Service instance
        - Analysis instance
        - Analysis Service title
        - Analysis Service UID
        - Analysis Service Keyword
        If an item that doesn't match any of the criterias above is found, the
        function will raise a RuntimeError
    """
    def resolve_to_uid(item):
        # Already a UID?
        if api.is_uid(item):
            return item
        elif IAnalysisService.providedBy(item):
            return item.UID()
        elif IRoutineAnalysis.providedBy(item):
            return item.getServiceUID()

        # Not an object: try to resolve the string as a service keyword
        # first and fall back to the service title
        bsc = api.get_tool("bika_setup_catalog")
        brains = bsc(portal_type='AnalysisService', getKeyword=item)
        if brains:
            return brains[0].UID
        brains = bsc(portal_type='AnalysisService', title=item)
        if brains:
            return brains[0].UID
        # Fixed message: original said "UID, title, keyword or title"
        raise RuntimeError(
            str(item) + " should be the UID, title or keyword "
                        "of an AnalysisService.")

    # Maybe only a single item was passed
    if type(items) not in (list, tuple):
        items = [items, ]
    # set() removes duplicate inputs; the second set() removes duplicate
    # UIDs resolved from different representations of the same service
    service_uids = map(resolve_to_uid, list(set(items)))
    return list(set(service_uids))
def notify_rejection(analysisrequest):
    """
    Notifies via email that a given Analysis Request has been rejected. The
    notification is sent to the Client contacts assigned to the Analysis
    Request. A rejection PDF is generated, attached to the Analysis Request
    and attached to the email as well.

    :param analysisrequest: Analysis Request to which the notification refers
    :returns: true if success
    """
    # We do this imports here to avoid circular dependencies until we deal
    # better with this notify_rejection thing.
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectPdfView, AnalysisRequestRejectEmailView

    arid = analysisrequest.getId()

    # This is the template to render for the pdf that will be either attached
    # to the email and attached the the Analysis Request for further access
    tpl = AnalysisRequestRejectPdfView(analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-rejected' % arid
    # NOTE(review): tempfile.mktemp is race-prone; kept for behavior parity,
    # but consider tempfile.mkstemp
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if pdf:
        # Attach the pdf to the Analysis Request
        attid = analysisrequest.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType(
            "Attachment", analysisrequest.aq_parent, attid)
        # Close the file handle once stored (the original leaked it)
        with open(pdf_fn) as pdf_file:
            att.setAttachmentFile(pdf_file)
        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        analysisrequest.addAttachment(att)
        os.remove(pdf_fn)

    # This is the message for the email's body
    tpl = AnalysisRequestRejectEmailView(
        analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')

    # compose and send email.
    mailto = []
    lab = analysisrequest.bika_setup.laboratory
    mailfrom = formataddr((encode_header(lab.getName()), lab.getEmailAddress()))
    mailsubject = _('%s has been rejected') % arid
    contacts = [analysisrequest.getContact()] + analysisrequest.getCCContact()
    for contact in contacts:
        name = to_utf8(contact.getFullname())
        email = to_utf8(contact.getEmailAddress())
        if email:
            mailto.append(formataddr((encode_header(name), email)))
    # Without recipients there is nothing to send
    if not mailto:
        return False
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = mailsubject
    mime_msg['From'] = mailfrom
    mime_msg['To'] = ','.join(mailto)
    mime_msg.preamble = 'This is a multi-part MIME message.'
    msg_txt = MIMEText(html, _subtype='html')
    mime_msg.attach(msg_txt)
    if pdf:
        attachPdf(mime_msg, pdf, filename)

    try:
        host = getToolByName(analysisrequest, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception:
        # Narrowed from a bare `except:` that also swallowed SystemExit and
        # KeyboardInterrupt. Sending failures are logged, not raised.
        logger.warning(
            "Email with subject %s was not sent (SMTP connection error)" % mailsubject)

    return True
def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    :raises ValueError: if ar is None, is not an Analysis Request, already
        has a retest assigned, or is not in "invalid" state
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state.
        # Fixed message: the original string had no {} placeholder (so the
        # .format call was a no-op) and inverted the condition's meaning
        raise ValueError("Cannot do a retest from an Analysis Request that "
                         "is not invalid: {}".format(repr(ar)))

    # Open the actions pool so reindexing is deferred until resume()
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)

    # Mark the retest with the `IAnalysisRequestRetest` interface
    alsoProvides(retest, IAnalysisRequestRetest)

    # Assign the source to retest
    retest.setInvalidated(ar)

    # Rename the retest according to the ID server setup
    renameAfterCreation(retest)

    # Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if api.get_workflow_status_of(an) in intermediate_states:
            # Exclude intermediate analyses
            continue

        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')
    alsoProvides(retest, IReceived)

    # Initialize analyses
    for analysis in retest.getAnalyses(full_objects=True):
        if not IRoutineAnalysis.providedBy(analysis):
            continue
        changeWorkflowState(analysis, "bika_analysis_workflow", "unassigned")

    # Reindex the retest and its parent
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # Resume the actions pool
    actions_pool.resume()
    return retest
def create_partition(analysis_request, request, analyses, sample_type=None,
                     container=None, preservation=None, skip_fields=None,
                     remove_primary_analyses=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :param remove_primary_analyses: removes the analyses from the parent
    :return: the new partition
    """
    # Fields that must never be copied from the primary into the partition
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        # Remove duplicates that may come from the caller's skip_fields
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": True,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    # `x and api.get_uid(x) or ""` maps falsy inputs to "" (clears the field)
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(preservation) or ""

    # Create the Partition via the general-purpose factory, reusing the
    # primary's specification results ranges (if any)
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)
    specs = ar.getSpecification()
    specs = specs and specs.getResultsRange() or []
    partition = create_analysisrequest(client, request=request, values=record,
                                       analyses=services, specifications=specs)

    # Remove analyses from the primary
    if remove_primary_analyses:
        analyses_ids = map(api.get_id, analyses)
        ar.manage_delObjects(analyses_ids)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    # Manually set the Date Received to match with its parent. This is
    # necessary because crar calls to processForm, so DateReceived is not
    # set because the partition has not been received yet
    partition.setDateReceived(ar.getDateReceived())
    partition.reindexObject(idxs="getDateReceived")

    # Force partition to same status as the primary
    status = api.get_workflow_status_of(ar)
    changeWorkflowState(partition, "bika_ar_workflow", status)
    if IReceived.providedBy(ar):
        alsoProvides(partition, IReceived)

    # And initialize the analyses the partition contains. This is required
    # here because the transition "initialize" of analyses rely on a guard,
    # so the initialization can only be performed when the sample has been
    # received (DateReceived is set)
    ActionHandlerPool.get_instance().queue_pool()
    for analysis in partition.getAnalyses(full_objects=True):
        doActionFor(analysis, "initialize")
    ActionHandlerPool.get_instance().resume()
    return partition
def fields_to_dict(obj, skip_fields=None):
    """Returns a dict mapping field names to field values for the object
    passed in. Computed fields are always excluded, as are the field names
    listed in skip_fields.
    """
    target = api.get_object(obj)
    skipped = skip_fields or []
    result = {}
    for name, field in api.get_fields(target).items():
        if name in skipped:
            continue
        if field.type == "computed":
            continue
        result[name] = field.get(target)
    return result
def resolve_rejection_reasons(values):
    """Resolves the rejection reasons from the submitted values to the format
    supported by Sample's Rejection Reason field
    """
    reasons = values.get("RejectionReasons")
    if not reasons:
        return []

    # Predefined reasons only count when the checkbox was ticked
    entry = reasons[0] or {}
    if entry.get("checkbox") == "on":
        selected = entry.get("multiselection") or []
    else:
        selected = []

    # Free-text ("other") reason, if any
    other = ""
    text_entries = values.get("RejectionReasons.textfield")
    if text_entries:
        other = (text_entries[0] or {}).get("other", "")

    # Nothing selected and no free text: nothing to store
    if selected or other:
        return [{"selected": selected, "other": other}]

    return []