Passed: Push to master ( c1f984...7f6007 ) by Ramon (created 03:49)

bika.lims.utils.analysisrequest (rating F)

Complexity

Total Complexity 64

Size/Duplication

Total Lines 553
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
wmc 64
eloc 296
dl 0
loc 553
rs 3.28
c 0
b 0
f 0

12 Functions

Rating   Name   Duplication   Size   Complexity  
C to_service_uid() 0 34 9
A get_hidden_service_uids() 0 9 4
A apply_hidden_services() 0 23 3
A resolve_rejection_reasons() 0 28 5
A get_rejection_pdf() 0 14 1
A get_rejection_mail() 0 33 3
B create_analysisrequest() 0 91 6
A do_rejection() 0 32 5
A fields_to_dict() 0 14 5
C create_partition() 0 90 8
C create_retest() 0 71 9
B to_services_uids() 0 36 6

How to fix

Complexity

Complex classes like bika.lims.utils.analysisrequest often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to find such a component is to look for fields/methods that share the same prefixes or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster.
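For this particular module, one cohesive group that stands out by name is the rejection helpers (resolve_rejection_reasons, do_rejection, get_rejection_pdf, get_rejection_mail). A minimal sketch of Extract Class applied to them follows; the SampleRejector name is hypothetical and the class simply delegates to the existing module-level functions, so behaviour would not change:

class SampleRejector(object):
    """Hypothetical extraction: groups the rejection workflow of one sample."""

    def __init__(self, sample):
        self.sample = sample

    def reject(self, notify=None):
        # delegates to the existing module-level do_rejection
        return do_rejection(self.sample, notify=notify)

    def pdf(self):
        # delegates to the existing get_rejection_pdf
        return get_rejection_pdf(self.sample)

    def mail(self, pdf=None):
        # delegates to the existing get_rejection_mail
        return get_rejection_mail(self.sample, rejection_pdf=pdf)

Keeping the current functions as thin wrappers around such a class would preserve the public API while moving the rejection logic out of this module, reducing the complexity the report flags.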

# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2020 by it's authors.
# Some rights reserved, see README and LICENSE.

import itertools

import six
from Products.Archetypes.config import UID_CATALOG
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import _createObjectByType
from Products.CMFPlone.utils import safe_unicode
from zope.interface import alsoProvides
from zope.lifecycleevent import modified

from bika.lims import api
from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.api.mail import compose_email
from bika.lims.api.mail import is_valid_email_address
from bika.lims.api.mail import send_email
from bika.lims.catalog import SETUP_CATALOG
from bika.lims.idserver import renameAfterCreation
from bika.lims.interfaces import IAnalysisRequest
from bika.lims.interfaces import IAnalysisRequestRetest
from bika.lims.interfaces import IAnalysisRequestSecondary
from bika.lims.interfaces import IAnalysisService
from bika.lims.interfaces import IReceived
from bika.lims.interfaces import IRoutineAnalysis
from bika.lims.utils import changeWorkflowState
from bika.lims.utils import copy_field_values
from bika.lims.utils import createPdf
from bika.lims.utils import tmpID
from bika.lims.workflow import ActionHandlerPool
from bika.lims.workflow import doActionFor
from bika.lims.workflow import push_reindex_to_actions_pool
from bika.lims.workflow.analysisrequest import AR_WORKFLOW_ID
from bika.lims.workflow.analysisrequest import do_action_to_analyses


def create_analysisrequest(client, request, values, analyses=None,
                           results_ranges=None, prices=None):
    """Creates a new AnalysisRequest (a Sample) object
    :param client: The container where the Sample will be created
    :param request: The current Http Request object
    :param values: A dict, with keys as AnalysisRequest's schema field names
    :param analyses: List of Services or Analyses (brains, objects, UIDs,
        keywords). Extends the list from values["Analyses"]
    :param results_ranges: List of Results Ranges. Extends the results ranges
        from the Specification object defined in values["Specification"]
    :param prices: Mapping of AnalysisService UID -> price. If not set, prices
        are read from the associated analysis service.
    """
    # Don't pollute the dict param passed in
    values = dict(values.items())

    # Resolve the Service uids of analyses to be added in the Sample. Values
    # passed-in might contain Profiles and also values that are not uids. Also,
    # additional analyses can be passed-in through either values or services
    service_uids = to_services_uids(values=values, services=analyses)

    # Remove the Analyses from values. We will add them manually
    values.update({"Analyses": []})

    # Create the Analysis Request and submit the form
    ar = _createObjectByType('AnalysisRequest', client, tmpID())
    ar.processForm(REQUEST=request, values=values)

    # Set the analyses manually
    ar.setAnalyses(service_uids, prices=prices, specs=results_ranges)

    # Handle hidden analyses from template and profiles
    # https://github.com/senaite/senaite.core/issues/1437
    # https://github.com/senaite/senaite.core/issues/1326
    apply_hidden_services(ar)

    # Handle rejection reasons
    rejection_reasons = resolve_rejection_reasons(values)
    ar.setRejectionReasons(rejection_reasons)

    # Handle secondary Analysis Request
    primary = ar.getPrimaryAnalysisRequest()
    if primary:
        # Mark the secondary with the `IAnalysisRequestSecondary` interface
        alsoProvides(ar, IAnalysisRequestSecondary)

        # Rename the secondary according to the ID server setup
        renameAfterCreation(ar)

        # Set dates to match with those from the primary
        ar.setDateSampled(primary.getDateSampled())
        ar.setSamplingDate(primary.getSamplingDate())
        ar.setDateReceived(primary.getDateReceived())

        # Force the transition of the secondary to received and set the
        # description/comment in the transition accordingly.
        if primary.getDateReceived():
            primary_id = primary.getId()
            comment = "Auto-received. Secondary Sample of {}".format(primary_id)
            changeWorkflowState(ar, AR_WORKFLOW_ID, "sample_received",
                                action="receive", comments=comment)

            # Mark the secondary as received
            alsoProvides(ar, IReceived)

            # Initialize analyses
            do_action_to_analyses(ar, "initialize")

            # Notify the ar has been modified
            modified(ar)

            # Reindex the AR
            ar.reindexObject()

            # If rejection reasons have been set, reject automatically
            if rejection_reasons:
                do_rejection(ar)

            # In "received" state already
            return ar

    # Try first with no sampling transition, because it is the most common config
    success, message = doActionFor(ar, "no_sampling_workflow")
    if not success:
        doActionFor(ar, "to_be_sampled")

    # If rejection reasons have been set, reject the sample automatically
    if rejection_reasons:
        do_rejection(ar)

    return ar


def apply_hidden_services(sample):
    """
    Applies the hidden setting to the sample analyses in accordance with the
    settings from its template and/or profiles
    :param sample: the sample that contains the analyses
    """
    hidden = list()

    # Get the "hidden" service uids from the template
    template = sample.getTemplate()
    hidden = get_hidden_service_uids(template)

    # Get the "hidden" service uids from profiles
    profiles = sample.getProfiles()
    hid_profiles = map(get_hidden_service_uids, profiles)
    hid_profiles = list(itertools.chain(*hid_profiles))
    hidden.extend(hid_profiles)

    # Update the sample analyses
    analyses = sample.getAnalyses(full_objects=True)
    analyses = filter(lambda an: an.getServiceUID() in hidden, analyses)
    for analysis in analyses:
        analysis.setHidden(True)


def get_hidden_service_uids(profile_or_template):
    """Returns a list of service uids that are set as hidden
    :param profile_or_template: ARTemplate or AnalysisProfile object
    """
    if not profile_or_template:
        return []
    settings = profile_or_template.getAnalysisServicesSettings()
    hidden = filter(lambda ser: ser.get("hidden", False), settings)
    return map(lambda setting: setting["uid"], hidden)


def to_services_uids(services=None, values=None):
    """
    Returns a list of Analysis Services uids
    :param services: A list of service items (uid, keyword, brain, obj, title)
    :param values: a dict, where keys are AR|Sample schema field names.
    :returns: a list of Analysis Services UIDs
    """
    def to_list(value):
        if not value:
            return []
        if isinstance(value, six.string_types):
            return [value]
        if isinstance(value, (list, tuple)):
            return value
        logger.warn("Cannot convert to a list: {}".format(value))
        return []

    services = services or []
    values = values or {}

    # Merge analyses from services and values into one list
    uids = to_list(services) + to_list(values.get("Analyses"))

    # Convert them to a list of service uids
    uids = filter(None, map(to_service_uid, uids))

    # Extend with service uids from profiles
    profiles = to_list(values.get("Profiles"))
    if profiles:
        uid_catalog = api.get_tool(UID_CATALOG)
        for brain in uid_catalog(UID=profiles):
            profile = api.get_object(brain)
            uids.extend(profile.getRawService() or [])

    # Get the service uids without duplicates, but preserving the order
    return list(dict.fromkeys(uids).keys())


def to_service_uid(uid_brain_obj_str):
    """Resolves the passed in element to a valid uid. Returns None if the value
    cannot be resolved to a valid uid
    """
    if api.is_uid(uid_brain_obj_str) and uid_brain_obj_str != "0":
        return uid_brain_obj_str

    if api.is_object(uid_brain_obj_str):
        obj = api.get_object(uid_brain_obj_str)

        if IAnalysisService.providedBy(obj):
            return api.get_uid(obj)

        elif IRoutineAnalysis.providedBy(obj):
            return obj.getServiceUID()

        else:
            logger.error("Type not supported: {}".format(obj.portal_type))
            return None

    if isinstance(uid_brain_obj_str, six.string_types):
        # Maybe is a keyword?
        query = dict(portal_type="AnalysisService", getKeyword=uid_brain_obj_str)
        brains = api.search(query, SETUP_CATALOG)
        if len(brains) == 1:
            return api.get_uid(brains[0])

        # Or maybe a title
        query = dict(portal_type="AnalysisService", title=uid_brain_obj_str)
        brains = api.search(query, SETUP_CATALOG)
        if len(brains) == 1:
            return api.get_uid(brains[0])

    return None


def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state
        raise ValueError("Cannot do a retest from {}: the Analysis Request "
                         "is not in 'invalid' state".format(repr(ar)))

    # Open the actions pool
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)

    # Mark the retest with the `IAnalysisRequestRetest` interface
    alsoProvides(retest, IAnalysisRequestRetest)

    # Assign the source to retest
    retest.setInvalidated(ar)

    # Rename the retest according to the ID server setup
    renameAfterCreation(retest)

    # Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if (api.get_workflow_status_of(an) in intermediate_states):
            # Exclude intermediate analyses
            continue

        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')
    alsoProvides(retest, IReceived)

    # Initialize analyses
    for analysis in retest.getAnalyses(full_objects=True):
        if not IRoutineAnalysis.providedBy(analysis):
            continue
        changeWorkflowState(analysis, "bika_analysis_workflow", "unassigned")

    # Reindex the retest and its parent
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # Resume the actions pool
    actions_pool.resume()
    return retest


def create_partition(analysis_request, request, analyses, sample_type=None,
                     container=None, preservation=None, skip_fields=None,
                     internal_use=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :param internal_use: whether the partition is flagged for internal use
    :return: the new partition
    """
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "DetachedFrom",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": internal_use,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(preservation) or ""

    # Create the Partition
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)

    # Populate the root's ResultsRanges to partitions
    results_ranges = ar.getResultsRange() or []
    partition = create_analysisrequest(client,
                                       request=request,
                                       values=record,
                                       analyses=services,
                                       results_ranges=results_ranges)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    # Manually set the Date Received to match with its parent. This is
    # necessary because create_analysisrequest calls processForm, so
    # DateReceived is not set because the partition has not been received yet
    partition.setDateReceived(ar.getDateReceived())
    partition.reindexObject(idxs="getDateReceived")

    # Force partition to same status as the primary
    status = api.get_workflow_status_of(ar)
    changeWorkflowState(partition, "bika_ar_workflow", status)
    if IReceived.providedBy(ar):
        alsoProvides(partition, IReceived)

    # And initialize the analyses the partition contains. This is required
    # here because the transition "initialize" of analyses relies on a guard,
    # so the initialization can only be performed when the sample has been
    # received (DateReceived is set)
    ActionHandlerPool.get_instance().queue_pool()
    for analysis in partition.getAnalyses(full_objects=True):
        doActionFor(analysis, "initialize")
    ActionHandlerPool.get_instance().resume()
    return partition


def fields_to_dict(obj, skip_fields=None):
    """
    Generates a dictionary with the field values of the object passed in, where
    keys are the field names. Skips computed fields
    """
    data = {}
    obj = api.get_object(obj)
    for field_name, field in api.get_fields(obj).items():
        if skip_fields and field_name in skip_fields:
            continue
        if field.type == "computed":
            continue
        data[field_name] = field.get(obj)
    return data


def resolve_rejection_reasons(values):
    """Resolves the rejection reasons from the submitted values to the format
    supported by Sample's Rejection Reason field
    """
    rejection_reasons = values.get("RejectionReasons")
    if not rejection_reasons:
        return []

    # Predefined reasons selected?
    selected = rejection_reasons[0] or {}
    if selected.get("checkbox") == "on":
        selected = selected.get("multiselection") or []
    else:
        selected = []

    # Other reasons set?
    other = values.get("RejectionReasons.textfield")
    if other:
        other = other[0] or {}
        other = other.get("other", "")
    else:
        other = ""

    # If neither selected nor other reasons are set, return empty
    if any([selected, other]):
        return [{"selected": selected, "other": other}]

    return []


def do_rejection(sample, notify=None):
    """Rejects the sample and, if it succeeds, generates the rejection pdf and
    sends a notification email. If notify is None, the notification email will
    only be sent if the setting in Setup is enabled
    """
    sample_id = api.get_id(sample)
    if not sample.getRejectionReasons():
        logger.warn("Cannot reject {} w/o rejection reasons".format(sample_id))
        return

    success, msg = doActionFor(sample, "reject")
    if not success:
        logger.warn("Cannot reject the sample {}".format(sample_id))
        return

    # Generate a pdf with the rejection reasons
    pdf = get_rejection_pdf(sample)

    # Attach the PDF to the sample
    filename = "{}-rejected.pdf".format(sample_id)
    sample.createAttachment(pdf, filename=filename)

    # Do we need to send a notification email?
    if notify is None:
        setup = api.get_setup()
        notify = setup.getNotifyOnSampleRejection()

    if notify:
        # Compose and send the email
        mime_msg = get_rejection_mail(sample, pdf)
        send_email(mime_msg)


def get_rejection_pdf(sample):
    """Generates a pdf with sample rejection reasons
    """
    # Avoid circular dependencies
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectPdfView

    # Render the rejection document as HTML
    tpl = AnalysisRequestRejectPdfView(sample, api.get_request())
    html = tpl.template()
    html = safe_unicode(html).encode("utf-8")

    # Generate the pdf
    return createPdf(htmlreport=html)


def get_rejection_mail(sample, rejection_pdf=None):
    """Generates an email to sample contacts with rejection reasons
    """
    # Avoid circular dependencies
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectEmailView

    # Render the email body
    tpl = AnalysisRequestRejectEmailView(sample, api.get_request())
    email_body = tpl.template()

    def to_valid_email_address(contact):
        if not contact:
            return None
        address = contact.getEmailAddress()
        if not is_valid_email_address(address):
            return None
        return address

    # Get the recipients
    _to = [sample.getContact()] + sample.getCCContact()
    _to = map(to_valid_email_address, _to)
    _to = filter(None, _to)

    lab = api.get_setup().laboratory
    attachments = rejection_pdf and [rejection_pdf] or []

    return compose_email(
        from_addr=lab.getEmailAddress(),
        to_addr=_to,
        subj=_("%s has been rejected") % api.get_id(sample),
        body=email_body,
        attachments=attachments)
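
For reference, a minimal usage sketch of create_analysisrequest, the module's main entry point. The client and request objects, the contact_uid and sampletype_uid values and the "Ca"/"Mg" service keywords are placeholders for whatever exists in a concrete SENAITE instance:

from bika.lims.utils.analysisrequest import create_analysisrequest

values = {
    "Contact": contact_uid,        # placeholder: UID of a client contact
    "SampleType": sampletype_uid,  # placeholder: UID of a sample type
}
# "analyses" accepts UIDs, objects, brains, keywords or titles; keywords here
sample = create_analysisrequest(client, request, values, analyses=["Ca", "Mg"])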