Passed
Push — 2.x (bb8554...7f0140) by Jordi
created 06:37

create_analysisrequest()   C

Complexity

Conditions 9

Size

Total Lines 106
Code Lines 45

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric   Value
eloc     45       (code lines)
dl       0        (duplicated lines)
loc      106      (total lines)
rs       6.4666
c        0
b        0
f        0
cc       9        (cyclomatic complexity / conditions)
nop      6        (number of parameters)

How to fix: Long Method

Small methods make your code easier to understand, especially when combined with a good name. And if a method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include:

- Extract Method (see the sketch below)
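The snippet below is a generic, hedged illustration (the order-processing function and field names are invented and are not part of SENAITE): each commented block of a long function is extracted into a small helper whose name is taken from the comment. The same idea applies to the commented blocks of create_analysisrequest() in the listing below; a sketch for that function follows the full listing.

# Before: one long function whose sections are explained with comments
def handle_order(order):
    # validate the order
    if not order.get("items"):
        raise ValueError("order has no items")
    # compute the total price
    total = sum(item["price"] * item["qty"] for item in order["items"])
    # apply the discount
    if order.get("coupon") == "WELCOME":
        total *= 0.9
    return total


# After: each comment became the name of an extracted method, and the
# original function shrinks to a short, readable coordinator.
def validate_order(order):
    if not order.get("items"):
        raise ValueError("order has no items")


def compute_total(order):
    return sum(item["price"] * item["qty"] for item in order["items"])


def apply_discount(order, total):
    if order.get("coupon") == "WELCOME":
        return total * 0.9
    return total


def handle_order(order):  # replaces the long version above
    validate_order(order)
    total = compute_total(order)
    return apply_discount(order, total)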

# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2021 by it's authors.
# Some rights reserved, see README and LICENSE.

import itertools
from string import Template

import six
from bika.lims import api
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.api.mail import compose_email
from bika.lims.api.mail import is_valid_email_address
from bika.lims.api.mail import send_email
from bika.lims.idserver import renameAfterCreation
from bika.lims.interfaces import IAnalysisRequest
from bika.lims.interfaces import IAnalysisRequestRetest
from bika.lims.interfaces import IAnalysisRequestSecondary
from bika.lims.interfaces import IAnalysisService
from bika.lims.interfaces import IReceived
from bika.lims.interfaces import IRoutineAnalysis
from bika.lims.utils import changeWorkflowState
from bika.lims.utils import copy_field_values
from bika.lims.utils import createPdf
from bika.lims.utils import get_link
from bika.lims.utils import tmpID
from bika.lims.workflow import ActionHandlerPool
from bika.lims.workflow import doActionFor
from bika.lims.workflow import push_reindex_to_actions_pool
from DateTime import DateTime
from Products.Archetypes.config import UID_CATALOG
from Products.Archetypes.event import ObjectInitializedEvent
from Products.CMFPlone.utils import _createObjectByType
from Products.CMFPlone.utils import safe_unicode
from senaite.core.catalog import SETUP_CATALOG
from senaite.core.permissions.sample import can_receive
from senaite.core.workflow import ANALYSIS_WORKFLOW
from senaite.core.workflow import SAMPLE_WORKFLOW
from zope import event
from zope.interface import alsoProvides


def create_analysisrequest(client, request, values, analyses=None,
                           results_ranges=None, prices=None):
    """Creates a new AnalysisRequest (a Sample) object
    :param client: The container where the Sample will be created
    :param request: The current Http Request object
    :param values: A dict, with keys as AnalysisRequest's schema field names
    :param analyses: List of Services or Analyses (brains, objects, UIDs,
        keywords). Extends the list from values["Analyses"]
    :param results_ranges: List of Results Ranges. Extends the results ranges
        from the Specification object defined in values["Specification"]
    :param prices: Mapping of AnalysisService UID -> price. If not set, prices
        are read from the associated analysis service.
    """
    # Don't pollute the dict param passed in
    values = dict(values.items())

    # Explicitly set client instead of relying on the passed-in values.
    # This might happen if this function is called programmatically outside of
    # the sample add form.
    values["Client"] = client

    # Resolve the Service uids of analyses to be added in the Sample. Values
    # passed-in might contain Profiles and also values that are not uids. Also,
    # additional analyses can be passed-in through either values or services
    service_uids = to_services_uids(values=values, services=analyses)

    # Remove the Analyses from values. We will add them manually
    values.update({"Analyses": []})

    # Remove the specification to set it *after* the analyses have been added
    specification = values.pop("Specification", None)

    # Create the Analysis Request and submit the form
    ar = _createObjectByType("AnalysisRequest", client, tmpID())
    # mark the sample as temporary to avoid indexing
    api.mark_temporary(ar)
    # NOTE: We call here `_processForm` (with underscore) to manually unmark
    #       the creation flag and trigger the `ObjectInitializedEvent`, which
    #       is used for snapshot creation.
    ar._processForm(REQUEST=request, values=values)

    # Set the analyses manually
    ar.setAnalyses(service_uids, prices=prices, specs=results_ranges)

    # Explicitly set the specification to the sample
    if specification:
        ar.setSpecification(specification)

    # Handle hidden analyses from template and profiles
    # https://github.com/senaite/senaite.core/issues/1437
    # https://github.com/senaite/senaite.core/issues/1326
    apply_hidden_services(ar)

    # Handle rejection reasons
    rejection_reasons = resolve_rejection_reasons(values)
    ar.setRejectionReasons(rejection_reasons)

    # Handle secondary Analysis Request
    primary = ar.getPrimaryAnalysisRequest()
    if primary:
        # Mark the secondary with the `IAnalysisRequestSecondary` interface
        alsoProvides(ar, IAnalysisRequestSecondary)

        # Set dates to match with those from the primary
        ar.setDateSampled(primary.getDateSampled())
        ar.setSamplingDate(primary.getSamplingDate())

        # Force the transition of the secondary to received and set the
        # description/comment in the transition accordingly.
        date_received = primary.getDateReceived()
        if date_received:
            receive_sample(ar, date_received=date_received)

    parent_sample = ar.getParentAnalysisRequest()
    if parent_sample:
        # Always set partition to received
        date_received = parent_sample.getDateReceived()
        receive_sample(ar, date_received=date_received)

    if not IReceived.providedBy(ar):
        setup = api.get_setup()
        # Sampling is required
        if ar.getSamplingRequired():
            changeWorkflowState(ar, SAMPLE_WORKFLOW, "to_be_sampled",
                                action="to_be_sampled")
        elif setup.getAutoreceiveSamples():
            receive_sample(ar)
        else:
            changeWorkflowState(ar, SAMPLE_WORKFLOW, "sample_due",
                                action="no_sampling_workflow")

    renameAfterCreation(ar)
    # AT only
    ar.unmarkCreationFlag()
    # unmark the sample as temporary
    api.unmark_temporary(ar)
    # explicit reindexing after sample finalization
    reindex(ar)
    # notify object initialization (also creates a snapshot)
    event.notify(ObjectInitializedEvent(ar))

    # If rejection reasons have been set, reject the sample automatically
    if rejection_reasons:
        do_rejection(ar)

    return ar


def reindex(obj, recursive=False):
    """Reindex the object

    :param obj: The object to reindex
    :param recursive: If true, all child objects are reindexed recursively
    """
    obj.reindexObject()
    if recursive:
        for child in obj.objectValues():
            reindex(child)


def receive_sample(sample, check_permission=False, date_received=None):
    """Receive the sample without transition
    """

    # NOTE: In `sample_registered` state we do not grant any roles the
    #       permission to receive a sample! Not sure if this can be ignored
    #       when the LIMS is configured to auto-receive samples?
    if check_permission and not can_receive(sample):
        return False

    changeWorkflowState(sample, SAMPLE_WORKFLOW, "sample_received",
                        action="receive")

    # Mark the secondary as received
    alsoProvides(sample, IReceived)
    # Manually set the received date
    if not date_received:
        date_received = DateTime()
    sample.setDateReceived(date_received)

    # Initialize analyses
    # NOTE: We use here `objectValues` instead of `getAnalyses`,
    #       because the Analyses are not yet indexed!
    for obj in sample.objectValues():
        if obj.portal_type != "Analysis":
            continue
        changeWorkflowState(obj, ANALYSIS_WORKFLOW, "unassigned",
                            action="initialize")

    return True


def apply_hidden_services(sample):
    """
    Applies the hidden setting to the sample analyses in accordance with the
    settings from its template and/or profiles
    :param sample: the sample that contains the analyses
    """
    hidden = list()

    # Get the "hidden" service uids from the template
    template = sample.getTemplate()
    hidden = get_hidden_service_uids(template)

    # Get the "hidden" service uids from profiles
    profiles = sample.getProfiles()
    hid_profiles = map(get_hidden_service_uids, profiles)
    hid_profiles = list(itertools.chain(*hid_profiles))
    hidden.extend(hid_profiles)

    # Update the sample analyses
    analyses = sample.getAnalyses(full_objects=True)
    analyses = filter(lambda an: an.getServiceUID() in hidden, analyses)
    for analysis in analyses:
        analysis.setHidden(True)


def get_hidden_service_uids(profile_or_template):
    """Returns a list of service uids that are set as hidden
    :param profile_or_template: ARTemplate or AnalysisProfile object
    """
    if not profile_or_template:
        return []
    settings = profile_or_template.getAnalysisServicesSettings()
    hidden = filter(lambda ser: ser.get("hidden", False), settings)
    return map(lambda setting: setting["uid"], hidden)


def to_services_uids(services=None, values=None):
    """
    Returns a list of Analysis Services uids
    :param services: A list of service items (uid, keyword, brain, obj, title)
    :param values: a dict, where keys are AR|Sample schema field names.
    :returns: a list of Analysis Services UIDs
    """
    def to_list(value):
        if not value:
            return []
        if isinstance(value, six.string_types):
            return [value]
        if isinstance(value, (list, tuple)):
            return value
        logger.warn("Cannot convert to a list: {}".format(value))
        return []

    services = services or []
    values = values or {}

    # Merge analyses from services and values into one list
    uids = to_list(services) + to_list(values.get("Analyses"))

    # Convert them to a list of service uids
    uids = filter(None, map(to_service_uid, uids))

    # Extend with service uids from profiles
    profiles = to_list(values.get("Profiles"))
    if profiles:
        uid_catalog = api.get_tool(UID_CATALOG)
        for brain in uid_catalog(UID=profiles):
            profile = api.get_object(brain)
            uids.extend(profile.getRawService() or [])

    # Get the service uids without duplicates, but preserving the order
    return list(dict.fromkeys(uids).keys())


def to_service_uid(uid_brain_obj_str):
    """Resolves the passed in element to a valid uid. Returns None if the value
    cannot be resolved to a valid uid
    """
    if api.is_uid(uid_brain_obj_str) and uid_brain_obj_str != "0":
        return uid_brain_obj_str

    if api.is_object(uid_brain_obj_str):
        obj = api.get_object(uid_brain_obj_str)

        if IAnalysisService.providedBy(obj):
            return api.get_uid(obj)

        elif IRoutineAnalysis.providedBy(obj):
            return obj.getServiceUID()

        else:
            logger.error("Type not supported: {}".format(obj.portal_type))
            return None

    if isinstance(uid_brain_obj_str, six.string_types):
        # Maybe is a keyword?
        query = dict(portal_type="AnalysisService", getKeyword=uid_brain_obj_str)
        brains = api.search(query, SETUP_CATALOG)
        if len(brains) == 1:
            return api.get_uid(brains[0])

        # Or maybe a title
        query = dict(portal_type="AnalysisService", title=uid_brain_obj_str)
        brains = api.search(query, SETUP_CATALOG)
        if len(brains) == 1:
            return api.get_uid(brains[0])

    return None


def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state
        raise ValueError("Cannot do a retest from an invalid Analysis Request")

    # Open the actions pool
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample', 'Remarks']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)

    # Mark the retest with the `IAnalysisRequestRetest` interface
    alsoProvides(retest, IAnalysisRequestRetest)

    # Assign the source to retest
    retest.setInvalidated(ar)

    # Rename the retest according to the ID server setup
    renameAfterCreation(retest)

    # Copy the analyses from the source
    intermediate_states = ['retracted', ]
    for an in ar.getAnalyses(full_objects=True):
        # skip retests
        if an.isRetest():
            continue

        if api.get_workflow_status_of(an) in intermediate_states:
            # Exclude intermediate analyses
            continue

        # Original sample might have multiple copies of same analysis
        keyword = an.getKeyword()
        analyses = retest.getAnalyses(full_objects=True)
        analyses = filter(lambda ret: ret.getKeyword() == keyword, analyses)
[Issue introduced by this change: "The variable keyword does not seem to be defined for all execution paths."]
        if analyses:
            keyword = '{}-{}'.format(keyword, len(analyses))

        # Create the analysis retest
        nan = _createObjectByType("Analysis", retest, keyword)

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # Transition the retest to "sample_received"!
    changeWorkflowState(retest, SAMPLE_WORKFLOW, 'sample_received')
    alsoProvides(retest, IReceived)

    # Initialize analyses
    for analysis in retest.getAnalyses(full_objects=True):
        if not IRoutineAnalysis.providedBy(analysis):
            continue
        changeWorkflowState(analysis, ANALYSIS_WORKFLOW, "unassigned")

    # Reindex and other stuff
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # Resume the actions pool
    actions_pool.resume()
    return retest


def create_partition(analysis_request, request, analyses, sample_type=None,
                     container=None, preservation=None, skip_fields=None,
                     internal_use=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :return: the new partition
    """
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "DetachedFrom",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "modification_date",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
        # default fields
        "id",
        "description",
        "allowDiscussion",
        "subject",
        "location",
        "contributors",
        "creators",
        "effectiveDate",
        "expirationDate",
        "language",
        "rights",
        "creation_date",
        "modification_date",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": internal_use,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(preservation) or ""

    # Create the Partition
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)

    # Populate the root's ResultsRanges to partitions
    results_ranges = ar.getResultsRange() or []

    partition = create_analysisrequest(client,
                                       request=request,
                                       values=record,
                                       analyses=services,
                                       results_ranges=results_ranges)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    return partition


def fields_to_dict(obj, skip_fields=None):
    """
    Generates a dictionary with the field values of the object passed in, where
    keys are the field names. Skips computed fields
    """
    data = {}
    obj = api.get_object(obj)
    for field_name, field in api.get_fields(obj).items():
        if skip_fields and field_name in skip_fields:
            continue
        if field.type == "computed":
            continue
        data[field_name] = field.get(obj)
    return data


def resolve_rejection_reasons(values):
    """Resolves the rejection reasons from the submitted values to the format
    supported by Sample's Rejection Reason field
    """
    rejection_reasons = values.get("RejectionReasons")
    if not rejection_reasons:
        return []

    # Predefined reasons selected?
    selected = rejection_reasons[0] or {}
    if selected.get("checkbox") == "on":
        selected = selected.get("multiselection") or []
    else:
        selected = []

    # Other reasons set?
    other = values.get("RejectionReasons.textfield")
    if other:
        other = other[0] or {}
        other = other.get("other", "")
    else:
        other = ""

    # If neither selected nor other reasons are set, return empty
    if any([selected, other]):
        return [{"selected": selected, "other": other}]

    return []


def do_rejection(sample, notify=None):
    """Rejects the sample and if succeeds, generates the rejection pdf and
    sends a notification email. If notify is None, the notification email will
    only be sent if the setting in Setup is enabled
    """
    sample_id = api.get_id(sample)
    if not sample.getRejectionReasons():
        logger.warn("Cannot reject {} w/o rejection reasons".format(sample_id))
        return

    success, msg = doActionFor(sample, "reject")
    if not success:
        logger.warn("Cannot reject the sample {}".format(sample_id))
        return

    # Generate a pdf with the rejection reasons
    pdf = get_rejection_pdf(sample)

    # Attach the PDF to the sample
    filename = "{}-rejected.pdf".format(sample_id)
    attachment = sample.createAttachment(pdf, filename=filename)
    pdf_file = attachment.getAttachmentFile()

    # Do we need to send a notification email?
    if notify is None:
        setup = api.get_setup()
        notify = setup.getNotifyOnSampleRejection()

    if notify:
        # Compose and send the email
        mime_msg = get_rejection_mail(sample, pdf_file)
        if mime_msg:
            # Send the email
            send_email(mime_msg)


def get_rejection_pdf(sample):
    """Generates a pdf with sample rejection reasons
    """
    # Avoid circular dependencies
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectPdfView

    # Render the html's rejection document
    tpl = AnalysisRequestRejectPdfView(sample, api.get_request())
    html = tpl.template()
    html = safe_unicode(html).encode("utf-8")

    # Generate the pdf
    return createPdf(htmlreport=html)


def get_rejection_mail(sample, rejection_pdf=None):
    """Generates an email to sample contacts with rejection reasons
    """
    # Get the reasons
    reasons = sample.getRejectionReasons()
    reasons = reasons and reasons[0] or {}
    reasons = reasons.get("selected", []) + [reasons.get("other")]
    reasons = filter(None, reasons)
    reasons = "<br/>- ".join(reasons)

    # Render the email body
    setup = api.get_setup()
    lab_address = setup.laboratory.getPrintAddress()
    email_body = Template(setup.getEmailBodySampleRejection())
    email_body = email_body.safe_substitute({
        "lab_address": "<br/>".join(lab_address),
        "reasons": reasons and "<br/>-{}".format(reasons) or "",
        "sample_id": api.get_id(sample),
        "sample_link": get_link(api.get_url(sample), api.get_id(sample))
    })

    def to_valid_email_address(contact):
        if not contact:
            return None
        address = contact.getEmailAddress()
        if not is_valid_email_address(address):
            return None
        return address

    # Get the recipients
    _to = [sample.getContact()] + sample.getCCContact()
    _to = map(to_valid_email_address, _to)
    _to = filter(None, _to)

    if not _to:
        # Cannot send an e-mail without recipient!
        logger.warn("No valid recipients for {}".format(api.get_id(sample)))
        return None

    lab = api.get_setup().laboratory
    attachments = rejection_pdf and [rejection_pdf] or []

    return compose_email(
        from_addr=lab.getEmailAddress(),
        to_addr=_to,
        subj=_("%s has been rejected") % api.get_id(sample),
        body=email_body,
        html=True,
        attachments=attachments)
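
As an illustration of how the Long Method finding could be addressed, here is a sketch only, not the project's actual refactoring: the helper names sync_secondary_with_primary and set_initial_workflow_state are invented here. Two of the commented blocks of create_analysisrequest() are extracted into module-level helpers; every call inside them is taken verbatim from the listing above and relies on the imports already present at the top of the file.

# Hypothetical helpers, extracted from create_analysisrequest() above.
def sync_secondary_with_primary(ar):
    """Extracted from the "Handle secondary Analysis Request" block"""
    primary = ar.getPrimaryAnalysisRequest()
    if not primary:
        return
    # Mark the secondary with the `IAnalysisRequestSecondary` interface
    alsoProvides(ar, IAnalysisRequestSecondary)
    # Set dates to match with those from the primary
    ar.setDateSampled(primary.getDateSampled())
    ar.setSamplingDate(primary.getSamplingDate())
    # Force the transition of the secondary to received
    date_received = primary.getDateReceived()
    if date_received:
        receive_sample(ar, date_received=date_received)


def set_initial_workflow_state(ar):
    """Extracted from the closing `if not IReceived.providedBy(ar)` block"""
    if IReceived.providedBy(ar):
        return
    setup = api.get_setup()
    if ar.getSamplingRequired():
        changeWorkflowState(ar, SAMPLE_WORKFLOW, "to_be_sampled",
                            action="to_be_sampled")
    elif setup.getAutoreceiveSamples():
        receive_sample(ar)
    else:
        changeWorkflowState(ar, SAMPLE_WORKFLOW, "sample_due",
                            action="no_sampling_workflow")

create_analysisrequest() would then call these two helpers in place of the inlined blocks, trimming both the line count and several of the nine counted conditions from the method body without changing behaviour.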