Passed
Push — master ( eb777d...88a96a )
by Ramon
04:26
created

notify_rejection()   B

Complexity

Conditions 7

Size

Total Lines 77
Code Lines 54

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
eloc 54
dl 0
loc 77
rs 7.1054
c 0
b 0
f 0
cc 7
nop 1

How to fix   Long Method   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include:

1
# -*- coding: utf-8 -*-
2
#
3
# This file is part of SENAITE.CORE.
4
#
5
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
6
# the terms of the GNU General Public License as published by the Free Software
7
# Foundation, version 2.
8
#
9
# This program is distributed in the hope that it will be useful, but WITHOUT
10
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
11
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
12
# details.
13
#
14
# You should have received a copy of the GNU General Public License along with
15
# this program; if not, write to the Free Software Foundation, Inc., 51
16
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17
#
18
# Copyright 2018-2019 by its authors.
19
# Some rights reserved, see README and LICENSE.
20
21
import os
22
import tempfile
23
from email.mime.multipart import MIMEMultipart
24
from email.mime.text import MIMEText
25
26
from Products.CMFCore.utils import getToolByName
27
from Products.CMFPlone.utils import _createObjectByType
28
from Products.CMFPlone.utils import safe_unicode
29
from bika.lims import api
30
from bika.lims import bikaMessageFactory as _
31
from bika.lims import logger
32
from bika.lims.idserver import renameAfterCreation
33
from bika.lims.interfaces import IAnalysisRequest
34
from bika.lims.interfaces import IAnalysisRequestRetest
35
from bika.lims.interfaces import IAnalysisRequestSecondary
36
from bika.lims.interfaces import IAnalysisService
37
from bika.lims.interfaces import IRoutineAnalysis
38
from bika.lims.utils import attachPdf
39
from bika.lims.utils import changeWorkflowState
40
from bika.lims.utils import copy_field_values
41
from bika.lims.utils import createPdf
42
from bika.lims.utils import encode_header
43
from bika.lims.utils import tmpID
44
from bika.lims.utils import to_utf8
45
from bika.lims.workflow import ActionHandlerPool
46
from bika.lims.workflow import doActionFor
47
from bika.lims.workflow import push_reindex_to_actions_pool
48
from bika.lims.workflow.analysisrequest import AR_WORKFLOW_ID
49
from bika.lims.workflow.analysisrequest import do_action_to_analyses
50
from email.Utils import formataddr
51
from zope.interface import alsoProvides
52
53
54
def create_analysisrequest(client, request, values, analyses=None,
                           partitions=None, specifications=None, prices=None):
    """This is meant for general use and should do everything necessary to
    create and initialise an AR and any other required auxiliary objects
    (Sample, SamplePartition, Analysis...)
    :param client:
        The container (Client) in which the ARs will be created.
    :param request:
        The current Request object.
    :param values:
        a dict, where keys are AR|Sample schema field names.
    :param analyses:
        Analysis services list.  If specified, augments the values in
        values['Analyses']. May consist of service objects, UIDs, or Keywords.
    :param partitions:
        A list of dictionaries, if specific partitions are required.  If not
        specified, AR's sample is created with a single partition.
        NOTE(review): this parameter is not referenced anywhere in this
        function body -- confirm whether it is still needed.
    :param specifications:
        These values augment those found in values['Specifications']
    :param prices:
        Allow different prices to be set for analyses.  If not set, prices
        are read from the associated analysis service.
    :returns: the newly created Analysis Request, already transitioned to
        its initial workflow state
    """
    # Don't pollute the dict param passed in
    values = dict(values.items())

    # Create the Analysis Request and let it populate itself from `values`
    ar = _createObjectByType('AnalysisRequest', client, tmpID())
    ar.processForm(REQUEST=request, values=values)

    # Resolve the services uids and set the analyses for this Analysis Request
    service_uids = get_services_uids(context=client, values=values,
                                     analyses_serv=analyses)
    ar.setAnalyses(service_uids, prices=prices, specs=specifications)

    # Handle secondary Analysis Request (one created from a primary sample)
    primary = ar.getPrimaryAnalysisRequest()
    if primary:
        # Mark the secondary with the `IAnalysisRequestSecondary` interface
        alsoProvides(ar, IAnalysisRequestSecondary)

        # Rename the secondary according to the ID server setup
        renameAfterCreation(ar)

        # Set dates to match with those from the primary
        ar.setDateSampled(primary.getDateSampled())
        ar.setSamplingDate(primary.getSamplingDate())
        ar.setDateReceived(primary.getDateReceived())

        # Force the transition of the secondary to received and set the
        # description/comment in the transition accordingly.
        if primary.getDateReceived():
            primary_id = primary.getId()
            comment = "Auto-received. Secondary Sample of {}".format(primary_id)
            changeWorkflowState(ar, AR_WORKFLOW_ID, "sample_received",
                                action="receive", comments=comment)

            # Initialize analyses
            do_action_to_analyses(ar, "initialize")

            # Reindex the AR
            ar.reindexObject()

            # In "received" state already
            return ar

    # Try first with no sampling transition, cause it is the most common config
    success, message = doActionFor(ar, "no_sampling_workflow")
    if not success:
        doActionFor(ar, "to_be_sampled")
    return ar
125
126
127
def get_services_uids(context=None, analyses_serv=None, values=None):
    """Return the list of Analysis Service UIDs resolved from the services
    passed in and/or the "Analyses"/"Profiles" entries of the values dict.

    :param context: the context used to acquire the uid_catalog tool
    :param analyses_serv: A list (or one object) of service-related info items.
        see _resolve_items_to_service_uids() docstring.
    :type analyses_serv: list
    :param values: a dict, where keys are AR|Sample schema field names.
    :type values: dict
    :returns: a list of analyses services UIDs
    :raises RuntimeError: when context is missing or neither analyses_serv
        nor values carry any data
    """
    analyses_serv = analyses_serv or []
    values = values or {}

    if not context or not (analyses_serv or values):
        raise RuntimeError(
            "get_services_uids: Missing or wrong parameters.")

    # Everything explicitly requested: the parameter plus whatever came in
    # through the "Analyses" entry of the values dict
    requested = analyses_serv + (values.get("Analyses") or [])

    # Analysis Requests may arrive via JSON petitions where services,
    # profiles or types are not always sent, so analyses and profiles may
    # not match and we must act in consequence.
    # Plone converts a multi-valued form field to a list, but a single
    # value arrives as a plain string (one UID) -- normalize to a list
    profiles = values.get('Profiles', [])
    if not isinstance(profiles, (list, tuple)):
        profiles = [profiles]

    if not requested and not profiles:
        return []

    # Expand each profile into the service UIDs it references
    if profiles:
        uid_catalog = getToolByName(context, 'uid_catalog')
        for brain in uid_catalog(UID=profiles):
            profile = api.get_object(brain)
            # Only services UIDs; duplicates are removed downstream by
            # _resolve_items_to_service_uids()
            requested += profile.getRawService()

    return _resolve_items_to_service_uids(requested)
175
176
177
def _resolve_items_to_service_uids(items):
    """Returns a list of service uids without duplicates based on the items
    :param items:
        A list (or one object) of service-related info items. The list can be
        heterogeneous and each item can be:
        - Analysis Service instance
        - Analysis instance
        - Analysis Service title
        - Analysis Service UID
        - Analysis Service Keyword
        If an item that doesn't match any of the criteria above is found, the
        function will raise a RuntimeError
    """
    def resolve_to_uid(item):
        # Direct resolutions: already a UID, a service, or a routine analysis
        if api.is_uid(item):
            return item
        elif IAnalysisService.providedBy(item):
            return item.UID()
        elif IRoutineAnalysis.providedBy(item):
            return item.getServiceUID()

        # Fall back to a catalog search: first by keyword, then by title
        bsc = api.get_tool("bika_setup_catalog")
        brains = bsc(portal_type='AnalysisService', getKeyword=item)
        if brains:
            return brains[0].UID
        brains = bsc(portal_type='AnalysisService', title=item)
        if brains:
            return brains[0].UID
        raise RuntimeError(
            str(item) + " should be the UID, title or keyword"
                        " of an AnalysisService.")

    # Maybe only a single item was passed
    if not isinstance(items, (list, tuple)):
        items = [items]
    # set() before resolution skips resolving duplicated inputs; set() after
    # removes duplicated UIDs (e.g. a service given both by UID and keyword)
    service_uids = map(resolve_to_uid, list(set(items)))
    return list(set(service_uids))
214
215
216
def notify_rejection(analysisrequest):
    """Notifies via email that a given Analysis Request has been rejected. The
    notification is sent to the Client contacts assigned to the Analysis
    Request.

    :param analysisrequest: Analysis Request to which the notification refers
    :returns: True if the notification was processed, False when no contact
        with a valid email address could be found
    """
    # We do this imports here to avoid circular dependencies until we deal
    # better with this notify_rejection thing.
    from bika.lims.browser.analysisrequest.reject import \
        AnalysisRequestRejectPdfView, AnalysisRequestRejectEmailView

    arid = analysisrequest.getId()

    # This is the template to render for the pdf that will be both attached
    # to the email and attached to the Analysis Request for further access
    tpl = AnalysisRequestRejectPdfView(analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-rejected' % arid
    # NOTE: tempfile.mktemp is race-prone; kept because createPdf expects a
    # path. Consider tempfile.mkstemp if createPdf can take a descriptor.
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if pdf:
        # Attach the pdf to the Analysis Request
        attid = analysisrequest.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType(
            "Attachment", analysisrequest.aq_parent, attid)
        # Open in binary mode (it is a PDF) and make sure the handle is
        # closed; the previous code leaked the file descriptor
        with open(pdf_fn, 'rb') as pdf_file:
            att.setAttachmentFile(pdf_file)
        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        analysisrequest.addAttachment(att)
        os.remove(pdf_fn)

    # This is the message for the email's body
    tpl = AnalysisRequestRejectEmailView(
        analysisrequest, analysisrequest.REQUEST)
    html = tpl.template()
    html = safe_unicode(html).encode('utf-8')

    # compose and send email.
    mailto = []
    lab = analysisrequest.bika_setup.laboratory
    mailfrom = formataddr((encode_header(lab.getName()), lab.getEmailAddress()))
    mailsubject = _('%s has been rejected') % arid
    contacts = [analysisrequest.getContact()] + analysisrequest.getCCContact()
    for contact in contacts:
        name = to_utf8(contact.getFullname())
        email = to_utf8(contact.getEmailAddress())
        if email:
            mailto.append(formataddr((encode_header(name), email)))
    if not mailto:
        # Nobody to notify
        return False
    mime_msg = MIMEMultipart('related')
    mime_msg['Subject'] = mailsubject
    mime_msg['From'] = mailfrom
    mime_msg['To'] = ','.join(mailto)
    mime_msg.preamble = 'This is a multi-part MIME message.'
    msg_txt = MIMEText(html, _subtype='html')
    mime_msg.attach(msg_txt)
    if pdf:
        attachPdf(mime_msg, pdf, filename)

    # Sending is best-effort: a mail failure must not break the rejection
    # workflow, but it is logged
    try:
        host = getToolByName(analysisrequest, 'MailHost')
        host.send(mime_msg.as_string(), immediate=True)
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt
        logger.warning(
            "Email with subject %s was not sent (SMTP connection error)",
            mailsubject)

    return True
293
294
295
def create_retest(ar):
    """Creates a retest (Analysis Request) from an invalidated Analysis Request
    :param ar: The invalidated Analysis Request
    :type ar: IAnalysisRequest
    :return: the newly created retest, transitioned to "sample_received"
    :rtype: IAnalysisRequest
    :raises ValueError: if ar is falsy, is not an Analysis Request, already
        has a retest assigned, or has not been invalidated
    """
    if not ar:
        raise ValueError("Source Analysis Request cannot be None")

    if not IAnalysisRequest.providedBy(ar):
        raise ValueError("Type not supported: {}".format(repr(type(ar))))

    if ar.getRetest():
        # Do not allow the creation of another retest!
        raise ValueError("Retest already set")

    if not ar.isInvalid():
        # Analysis Request must be in 'invalid' state. The original message
        # said "from an invalid Analysis Request" and dropped the .format()
        # argument (no placeholder in the template)
        raise ValueError("Cannot do a retest from an Analysis Request that "
                         "has not been invalidated: {}".format(repr(ar)))

    # Open the actions pool so reindex operations are deferred and batched
    actions_pool = ActionHandlerPool.get_instance()
    actions_pool.queue_pool()

    # Create the Retest (Analysis Request)
    ignore = ['Analyses', 'DatePublished', 'Invalidated', 'Sample']
    retest = _createObjectByType("AnalysisRequest", ar.aq_parent, tmpID())
    copy_field_values(ar, retest, ignore_fieldnames=ignore)

    # Mark the retest with the `IAnalysisRequestRetest` interface
    alsoProvides(retest, IAnalysisRequestRetest)

    # Assign the source to retest
    retest.setInvalidated(ar)

    # Rename the retest according to the ID server setup
    renameAfterCreation(retest)

    # Copy the analyses from the source
    intermediate_states = ['retracted', 'reflexed']
    for an in ar.getAnalyses(full_objects=True):
        if api.get_workflow_status_of(an) in intermediate_states:
            # Exclude intermediate analyses
            continue

        nan = _createObjectByType("Analysis", retest, an.getKeyword())

        # Make a copy
        ignore_fieldnames = ['DataAnalysisPublished']
        copy_field_values(an, nan, ignore_fieldnames=ignore_fieldnames)
        nan.unmarkCreationFlag()
        push_reindex_to_actions_pool(nan)

    # Transition the retest to "sample_received"!
    changeWorkflowState(retest, 'bika_ar_workflow', 'sample_received')

    # Initialize analyses
    for analysis in retest.getAnalyses(full_objects=True):
        if not IRoutineAnalysis.providedBy(analysis):
            continue
        changeWorkflowState(analysis, "bika_analysis_workflow", "unassigned")

    # Reindex the retest and its parent so catalogs pick up the new objects
    push_reindex_to_actions_pool(retest)
    push_reindex_to_actions_pool(retest.aq_parent)

    # Resume the actions pool
    actions_pool.resume()
    return retest
365
366
367
def create_partition(analysis_request, request, analyses, sample_type=None,
                     container=None, preservation=None, skip_fields=None,
                     remove_primary_analyses=True):
    """
    Creates a partition for the analysis_request (primary) passed in
    :param analysis_request: uid/brain/object of IAnalysisRequest type
    :param request: the current request object
    :param analyses: uids/brains/objects of IAnalysis type
    :param sample_type: uid/brain/object of SampleType
    :param container: uid/brain/object of Container
    :param preservation: uid/brain/object of Preservation
    :param skip_fields: names of fields to be skipped on copy from primary
    :param remove_primary_analyses: removes the analyses from the parent
    :return: the new partition
    """
    # Fields that must never be copied from the primary into the partition
    partition_skip_fields = [
        "Analyses",
        "Attachment",
        "Client",
        "Profile",
        "Profiles",
        "RejectionReasons",
        "Remarks",
        "ResultsInterpretation",
        "ResultsInterpretationDepts",
        "Sample",
        "Template",
        "creation_date",
        "id",
        "modification_date",
        "ParentAnalysisRequest",
        "PrimaryAnalysisRequest",
    ]
    if skip_fields:
        partition_skip_fields.extend(skip_fields)
        partition_skip_fields = list(set(partition_skip_fields))

    # Copy field values from the primary analysis request
    ar = api.get_object(analysis_request)
    record = fields_to_dict(ar, partition_skip_fields)

    # Update with values that are partition-specific
    record.update({
        "InternalUse": True,
        "ParentAnalysisRequest": api.get_uid(ar),
    })
    # `None` means "inherit from the primary"; any other falsy value clears
    # the field explicitly
    if sample_type is not None:
        record["SampleType"] = sample_type and api.get_uid(sample_type) or ""
    if container is not None:
        record["Container"] = container and api.get_uid(container) or ""
    if preservation is not None:
        record["Preservation"] = preservation and api.get_uid(preservation) or ""

    # Create the Partition
    client = ar.getClient()
    analyses = list(set(map(api.get_object, analyses)))
    services = map(lambda an: an.getAnalysisService(), analyses)
    specs = ar.getSpecification()
    specs = specs and specs.getResultsRange() or []
    partition = create_analysisrequest(client, request=request, values=record,
                                       analyses=services, specifications=specs)

    # Remove analyses from the primary
    if remove_primary_analyses:
        analyses_ids = map(api.get_id, analyses)
        ar.manage_delObjects(analyses_ids)

    # Reindex Parent Analysis Request
    ar.reindexObject(idxs=["isRootAncestor"])

    # Manually set the Date Received to match with its parent. This is
    # necessary because create_analysisrequest calls processForm, so
    # DateReceived is not set because the partition has not been received yet
    partition.setDateReceived(ar.getDateReceived())
    # `idxs` must be a sequence of index names; the previous bare string only
    # worked by accident of substring matching
    partition.reindexObject(idxs=["getDateReceived"])

    # Force partition to same status as the primary
    status = api.get_workflow_status_of(ar)
    changeWorkflowState(partition, "bika_ar_workflow", status)

    # And initialize the analyses the partition contains. This is required
    # here because the transition "initialize" of analyses rely on a guard,
    # so the initialization can only be performed when the sample has been
    # received (DateReceived is set)
    ActionHandlerPool.get_instance().queue_pool()
    for analysis in partition.getAnalyses(full_objects=True):
        doActionFor(analysis, "initialize")
    ActionHandlerPool.get_instance().resume()
    return partition
456
457
458
def fields_to_dict(obj, skip_fields=None):
    """Return a mapping of field name to field value for the object passed
    in. Computed fields are always left out, as is any field whose name
    appears in skip_fields.
    """
    obj = api.get_object(obj)
    skip = skip_fields or []
    return dict(
        (name, field.get(obj))
        for name, field in api.get_fields(obj).items()
        if name not in skip and field.type != "computed")
472