Passed
Push — master ( 5e306e...e48f16 )
by Jordi
04:40
created

bika.lims.utils.analysisrequest   B

Complexity

Total Complexity 51

Size/Duplication

Total Lines 439
Duplicated Lines 0 %

Importance

Changes 0
Metric Value
wmc 51
eloc 242
dl 0
loc 439
rs 7.92
c 0
b 0
f 0

7 Functions

Rating   Name   Duplication   Size   Complexity  
C notify_rejection() 0 80 8
B _resolve_items_to_service_uids() 0 37 7
A create_analysisrequest() 0 52 3
A fields_to_dict() 0 14 5
C create_partition() 0 88 8
C get_services_uids() 0 48 11
C create_retest() 0 70 9

How to fix   Complexity   

Complexity

Complex classes like bika.lims.utils.analysisrequest often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to find such a component is to look for fields/methods that share the same prefixes, or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.

1
# -*- coding: utf-8 -*-
2
#
3
# This file is part of SENAITE.CORE
4
#
5
# Copyright 2018 by it's authors.
6
# Some rights reserved. See LICENSE.rst, CONTRIBUTORS.rst.
7
8
import os
9
import tempfile
10
from email.mime.multipart import MIMEMultipart
11
from email.mime.text import MIMEText
12
from email.Utils import formataddr
13
14
from bika.lims import api
15
from bika.lims import bikaMessageFactory as _
16
from bika.lims import logger
17
from bika.lims.idserver import renameAfterCreation
18
from bika.lims.interfaces import IAnalysisRequest
19
from bika.lims.interfaces import IAnalysisRequestRetest
20
from bika.lims.interfaces import IAnalysisService
21
from bika.lims.interfaces import IRoutineAnalysis
22
from bika.lims.utils import attachPdf
23
from bika.lims.utils import changeWorkflowState
24
from bika.lims.utils import copy_field_values
25
from bika.lims.utils import createPdf
26
from bika.lims.utils import encode_header
27
from bika.lims.utils import tmpID
28
from bika.lims.utils import to_utf8
29
from bika.lims.workflow import ActionHandlerPool
30
from bika.lims.workflow import doActionFor
31
from bika.lims.workflow import push_reindex_to_actions_pool
32
from Products.CMFCore.utils import getToolByName
33
from Products.CMFPlone.utils import _createObjectByType
34
from Products.CMFPlone.utils import safe_unicode
35
from zope.interface import alsoProvides
36
37
38
def create_analysisrequest(client, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """Creates and initialises an Analysis Request together with any other
    auxiliary object required (Sample, SamplePartition, Analysis...). Meant
    for general use.

    :param client: The container (Client) in which the ARs will be created.
    :param request: The current Request object.
    :param values: a dict, where keys are AR|Sample schema field names.
    :param analyses: Analysis services list. If specified, augments the
        values in values['Analyses']. May consist of service objects, UIDs,
        or Keywords.
    :param partitions: A list of dictionaries, if specific partitions are
        required. If not specified, AR's sample is created with a single
        partition.
    :param specifications: These values augment those found in
        values['Specifications']
    :param prices: Allow different prices to be set for analyses. If not
        set, prices are read from the associated analysis service.
    """
    # Work on a shallow copy so the dict passed in by the caller does not
    # get polluted with our changes
    values = dict(values.items())

    # Create new sample or locate the existing for secondary AR
    # TODO Sample Cleanup - Manage secondary ARs properly
    secondary = False

    # Create the Analysis Request and bootstrap it from the form values
    ar = _createObjectByType('AnalysisRequest', client, tmpID())
    ar.processForm(REQUEST=request, values=values)

    # Resolve the services uids and assign the analyses to this AR
    uids = get_services_uids(
        context=client, values=values, analyses_serv=analyses)
    ar.setAnalyses(uids, prices=prices, specs=specifications)

    # TODO Sample Cleanup - Manage secondary ARs properly
    if secondary:
        # Secondary AR does not longer comes from a Sample, rather from an AR.
        # If the Primary AR has been received, then force the transition of the
        # secondary to received and set the description/comment in the
        # transition accordingly so it will be displayed later in the log tab
        logger.warn("Sync transition for secondary AR is still missing")

    # Try first with no sampling transition, cause it is the most common config
    success, message = doActionFor(ar, "no_sampling_workflow")
    if not success:
        doActionFor(ar, "to_be_sampled")
    return ar
90
91
92
def get_services_uids(context=None, analyses_serv=None, values=None):
    """Returns a list of analyses services UIDs resolved from the
    parameters passed in.

    :param analyses_serv: A list (or one object) of service-related info items.
        see _resolve_items_to_service_uids() docstring.
    :type analyses_serv: list
    :param values: a dict, where keys are AR|Sample schema field names.
    :type values: dict
    :returns: a list of analyses services UIDs
    """
    analyses_serv = analyses_serv or []
    values = values or {}

    if not context or not (analyses_serv or values):
        raise RuntimeError(
            "get_services_uids: Missing or wrong parameters.")

    # Merge the analyses from both sources into a single list
    analyses_services = analyses_serv + (values.get("Analyses") or [])

    # Analysis Requests can be created through JSON petitions, where
    # services, profiles or types are not always sent. Analyses and
    # profiles that do not match each other might come in, so we must act
    # in consequence. Get the analyses profiles first
    analyses_profiles = values.get('Profiles', [])
    if not isinstance(analyses_profiles, (list, tuple)):
        # Plone converts the incoming form value to a list, if there are
        # multiple values; but if not, it will send a string (a single UID).
        analyses_profiles = [analyses_profiles]

    if not (analyses_services or analyses_profiles):
        return []

    # Extend analyses_services with the services UIDs from each profile
    if analyses_profiles:
        uid_catalog = getToolByName(context, 'uid_catalog')
        for brain in uid_catalog(UID=analyses_profiles):
            profile = api.get_object(brain)
            # Only services UIDs; duplicates are removed afterwards by
            # _resolve_items_to_service_uids()
            analyses_services += profile.getRawService()

    return _resolve_items_to_service_uids(analyses_services)
140
141
142
def _resolve_items_to_service_uids(items):
    """ Returns a list of service uids without duplicates based on the items
    :param items:
        A list (or one object) of service-related info items. The list can be
        heterogeneous and each item can be:
        - Analysis Service instance
        - Analysis instance
        - Analysis Service title
        - Analysis Service UID
        - Analysis Service Keyword
        If an item that doesn't match any of the criterias above is found, the
        function will raise a RuntimeError
    """
    def resolve_to_uid(item):
        # Direct matches: already an UID, a service or an analysis
        if api.is_uid(item):
            return item
        elif IAnalysisService.providedBy(item):
            return item.UID()
        elif IRoutineAnalysis.providedBy(item):
            return item.getServiceUID()

        # Otherwise, try to resolve the item as a keyword or a title
        bsc = api.get_tool("bika_setup_catalog")
        brains = bsc(portal_type='AnalysisService', getKeyword=item)
        if brains:
            return brains[0].UID
        brains = bsc(portal_type='AnalysisService', title=item)
        if brains:
            return brains[0].UID
        # Previous message repeated "title" and carried a double space
        # caused by an implicit string concatenation
        raise RuntimeError(
            str(item) + " should be the UID, title or keyword "
                        "of an AnalysisService.")

    # Maybe only a single item was passed
    if not isinstance(items, (list, tuple)):
        items = [items, ]
    # list() keeps the return type stable under Python 3, where map
    # yields a lazy iterator instead of a list
    service_uids = list(map(resolve_to_uid, list(set(items))))
    return list(set(service_uids))
179
180
181
def notify_rejection(analysisrequest):
    """
    Notifies via email that a given Analysis Request has been rejected. The
    notification is sent to the Client contacts assigned to the Analysis
    Request.

    :param analysisrequest: Analysis Request to which the notification refers
    :returns: true if success
    """

    # We do this imports here to avoid circular dependencies until we deal
    # better with this notify_rejection thing.
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectPdfView, AnalysisRequestRejectEmailView

    arid = analysisrequest.getId()

    # This is the template to render for the pdf that will be either attached
    # to the email and attached the the Analysis Request for further access
    tpl = AnalysisRequestRejectPdfView(analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-rejected' % arid
    # mkstemp instead of the deprecated/insecure mktemp: mktemp only returns
    # a name, leaving a race window in which another process could create
    # the file; mkstemp creates it atomically
    pdf_fd, pdf_fn = tempfile.mkstemp(suffix=".pdf")
    os.close(pdf_fd)
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if pdf:
        # Attach the pdf to the Analysis Request
        attid = analysisrequest.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType(
            "Attachment", analysisrequest.aq_parent, attid)
        # Close the file handle once the attachment has been set
        with open(pdf_fn) as pdf_file:
            att.setAttachmentFile(pdf_file)
        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        atts = analysisrequest.getAttachment() + [att] if \
            analysisrequest.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        analysisrequest.setAttachment(atts)

    # Remove the temporary file in any case: mkstemp creates the file on
    # disk even if the pdf rendering above failed
    if os.path.exists(pdf_fn):
        os.remove(pdf_fn)

    # This is the message for the email's body
    tpl = AnalysisRequestRejectEmailView(
        analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')

    # compose and send email.
    mailto = []
    lab = analysisrequest.bika_setup.laboratory
    mailfrom = formataddr((encode_header(lab.getName()), lab.getEmailAddress()))
    mailsubject = _('%s has been rejected') % arid
    contacts = [analysisrequest.getContact()] + analysisrequest.getCCContact()
    for contact in contacts:
        name = to_utf8(contact.getFullname())
        email = to_utf8(contact.getEmailAddress())
        if email:
            mailto.append(formataddr((encode_header(name), email)))
    if not mailto:
        return False
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = mailsubject
    mime_msg['From'] = mailfrom
    mime_msg['To'] = ','.join(mailto)
    mime_msg.preamble = 'This is a multi-part MIME message.'
    msg_txt = MIMEText(html, _subtype='html')
    mime_msg.attach(msg_txt)
    if pdf:
        attachPdf(mime_msg, pdf, filename)

    try:
        host = getToolByName(analysisrequest, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception:
        # Mail delivery is best-effort: a broken SMTP setup must not break
        # the rejection workflow itself. Narrowed from a bare except, which
        # also swallowed KeyboardInterrupt/SystemExit.
        logger.warning(
            "Email with subject %s was not sent (SMTP connection error)" % mailsubject)

    return True
261
262
263
def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :rtype: IAnalysisRequest
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state. The previous message
        # lacked the "{}" placeholder (so the AR was never interpolated) and
        # stated the opposite of the condition being checked here
        raise ValueError("Cannot do a retest from a non-invalidated Analysis "
                         "Request {}".format(repr(ar)))

    # Open the actions pool
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)

    # Mark the retest with the `IAnalysisRequestRetest` interface
    alsoProvides(retest, IAnalysisRequestRetest)

    # Assign the source to retest
    retest.setInvalidated(ar)

    # Rename the retest according to the ID server setup
    renameAfterCreation(retest)

    # Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if api.get_workflow_status_of(an) in intermediate_states:
            # Exclude intermediate analyses
            continue

        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        # NOTE(review): 'DataAnalysisPublished' looks like a typo of
        # 'DateAnalysisPublished' -- confirm against the Analysis schema
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')

    # Initialize analyses
    for analysis in retest.getAnalyses(full_objects=True):
        if not IRoutineAnalysis.providedBy(analysis):
            continue
        changeWorkflowState(analysis, "bika_analysis_workflow", "unassigned")

    # Reindex and other stuff
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # Resume the actions pool
    actions_pool.resume()
    return retest
333
334
335
def create_partition(analysis_request, request, analyses, sample_type=None,
                     container=None, preservation=None, skip_fields=None,
                     remove_primary_analyses=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :param remove_primary_analyses: removes the analyses from the parent
    :return: the new partition
    """
    # Fields that are never copied from the primary into the partition
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": True,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(preservation) or ""

    # Create the Partition
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)
    specs = ar.getSpecification()
    specs = specs and specs.getResultsRange() or []
    partition = create_analysisrequest(client, request=request, values=record,
                                       analyses=services, specifications=specs)

    # Remove analyses from the primary
    if remove_primary_analyses:
        analyses_ids = map(api.get_id, analyses)
        ar.manage_delObjects(analyses_ids)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    # Manually set the Date Received to match with its parent. This is
    # necessary because crar calls to processForm, so DateReceived is not
    # set because the partition has not been received yet
    partition.setDateReceived(ar.getDateReceived())
    # idxs as a list, consistent with the reindexObject call above (a bare
    # string here diverged from the sibling call's list form)
    partition.reindexObject(idxs=["getDateReceived"])

    # Force partition to same status as the primary
    status = api.get_workflow_status_of(ar)
    changeWorkflowState(partition, "bika_ar_workflow", status)

    # And initialize the analyses the partition contains. This is required
    # here because the transition "initialize" of analyses rely on a guard,
    # so the initialization can only be performed when the sample has been
    # received (DateReceived is set)
    ActionHandlerPool.get_instance().queue_pool()
    for analysis in partition.getAnalyses(full_objects=True):
        doActionFor(analysis, "initialize")
    ActionHandlerPool.get_instance().resume()
    return partition
423
424
425
def fields_to_dict(obj, skip_fields=None):
    """
    Generates a dictionary with the field values of the object passed in,
    where keys are the field names. Skips computed fields, as well as the
    fields listed in skip_fields.
    """
    skip = skip_fields or []
    obj = api.get_object(obj)
    return dict((name, field.get(obj))
                for name, field in api.get_fields(obj).items()
                if name not in skip and field.type != "computed")
439