# -*- coding: utf-8 -*-
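#
# Browser view behind the "Partition Magic" form: creates partitions (child
# Analysis Requests) out of selected primary Analysis Requests.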

from collections import OrderedDict
from collections import defaultdict

from bika.lims import bikaMessageFactory as _
from bika.lims import logger
from bika.lims.decorators import returns_super_model
from bika.lims.workflow import doActionFor
from bika.lims import api
from bika.lims.interfaces import IProxyField
from bika.lims.utils.analysisrequest import create_analysisrequest as crar
from Products.Five.browser import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile

DEFAULT_NUMBER_OF_PARTITIONS = 0
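
# Fields that are not copied from the primary AR when a partition is created;
# they are either set explicitly for the partition or managed by the
# partition itself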
PARTITION_SKIP_FIELDS = [
    "Analyses",
    "Attachment",
    "Client",
    "Container",
    "Preservation",
    "Profile",
    "Profiles",
    "RejectionReasons",
    "Remarks",
    "ResultsInterpretation",
    "ResultsInterpretationDepts",
    "Sample",
    "SampleType",
    "Template",
    "creation_date",
    "id",
    "modification_date",
    "ParentAnalysisRequest",
]


class PartitionMagicView(BrowserView):
    """Manage Partitions of primary ARs
    """
    template = ViewPageTemplateFile("templates/partition_magic.pt")

    def __init__(self, context, request):
        super(PartitionMagicView, self).__init__(context, request)
        self.context = context
        self.request = request
        self.back_url = self.context.absolute_url()
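        # mapping of primary AR -> analyses to be removed once the
        # partitions have been created (see remove_primary_analyses)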
        self.analyses_to_remove = dict()

    def __call__(self):
        form = self.request.form

        # Form submit toggle
        form_submitted = form.get("submitted", False)

        # Buttons
        form_preview = form.get("button_preview", False)
        form_create = form.get("button_create", False)
        form_cancel = form.get("button_cancel", False)

        objs = self.get_objects()

        # No ARs selected
        if not objs:
            return self.redirect(message=_("No items selected"),
                                 level="warning")

        # Handle preview
        if form_submitted and form_preview:
            logger.info("*** PREVIEW ***")

        # Handle create
        if form_submitted and form_create:
            logger.info("*** CREATE PARTITIONS ***")

            partitions = []

            # create the partitions
            for partition in form.get("partitions", []):
                primary_uid = partition.get("primary_uid")
                sampletype_uid = partition.get("sampletype_uid")
                container_uid = partition.get("container_uid")
                preservation_uid = partition.get("preservation_uid")
                analyses_uids = partition.get("analyses")
                if not analyses_uids or not primary_uid:
                    # Cannot create a partition w/o analyses!
                    continue

                partition = self.create_partition(
                    primary_uid=primary_uid,
                    sampletype_uid=sampletype_uid,
                    container_uid=container_uid,
                    preservation_uid=preservation_uid,
                    analyses_uids=analyses_uids)
                partitions.append(partition)
                logger.info("Successfully created partition: {}".format(
                    api.get_path(partition)))

                # Force the reception of the partition
                doActionFor(partition, "receive")

            if not partitions:
                # If no partitions were created, show a warning message
                return self.redirect(message=_("No partitions were created"))

            # Remove analyses from primary Analysis Requests
            self.remove_primary_analyses()

            message = _("Created {} partitions: {}".format(
                len(partitions), ", ".join(map(api.get_title, partitions))))
            return self.redirect(message=message)

        # Handle cancel
        if form_submitted and form_cancel:
            logger.info("*** CANCEL ***")
            return self.redirect(message=_("Partitioning canceled"))

        return self.template()

    def create_partition(self, primary_uid, sampletype_uid, container_uid,
                         preservation_uid, analyses_uids):
        """Create a new partition (AR)
        """
        logger.info("*** CREATE PARTITION ***")

        ar = self.get_object_by_uid(primary_uid)
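        # seed the partition: mark it for internal use and link it to the
        # primary AR and the selected sample type, container and preservation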
        record = {
            "InternalUse": True,
            "ParentAnalysisRequest": primary_uid,
            "SampleType": sampletype_uid,
            "Container": container_uid,
            "Preservation": preservation_uid,
        }
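
        # copy the remaining schema fields from the primary AR, skipping
        # computed fields and the fields listed in PARTITION_SKIP_FIELDS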
        for fieldname, field in api.get_fields(ar).items():
            # if self.is_proxy_field(field):
            #     logger.info("Skipping proxy field {}".format(fieldname))
            #     continue
            if self.is_computed_field(field):
                logger.info("Skipping computed field {}".format(fieldname))
                continue
            if fieldname in PARTITION_SKIP_FIELDS:
                logger.info("Skipping field {}".format(fieldname))
                continue
            fieldvalue = field.get(ar)
            record[fieldname] = fieldvalue
            logger.info("Update record '{}': {}".format(
                fieldname, repr(fieldvalue)))

        client = ar.getClient()
        analyses = map(self.get_object_by_uid, analyses_uids)
        services = map(lambda an: an.getAnalysisService(), analyses)
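
        # create the partition for the same client, with the services of the
        # selected analyses and the result ranges of the primary's
        # specification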
        partition = crar(
            client,
            self.request,
            record,
            analyses=services,
            specifications=self.get_specifications_for(ar)
        )

        # Remove selected analyses from the parent Analysis Request
        self.push_primary_analyses_for_removal(ar, analyses)

        # Reindex Parent Analysis Request
        # TODO Workflow - AnalysisRequest - Partitions creation
        ar.reindexObject(idxs=["isRootAncestor"])

        return partition

    def push_primary_analyses_for_removal(self, analysis_request, analyses):
        """Stores the analyses to be removed after partitions creation
        """
        to_remove = self.analyses_to_remove.get(analysis_request, [])
        to_remove.extend(analyses)
        self.analyses_to_remove[analysis_request] = to_remove

    def remove_primary_analyses(self):
        """Remove analyses relocated to partitions
        """
        for ar, analyses in self.analyses_to_remove.items():
            analyses_ids = list(set(map(api.get_id, analyses)))
            ar.manage_delObjects(analyses_ids)
        self.analyses_to_remove = dict()

    def get_specifications_for(self, ar):
        """Returns a mapping of service uid -> specification
        """
        spec = ar.getSpecification()
        if not spec:
            return []
        return spec.getResultsRange()

    def is_proxy_field(self, field):
        """Checks if the field is a proxy field
        """
        return IProxyField.providedBy(field)

    def is_computed_field(self, field):
        """Checks if the field is a computed field
        """
        return field.type == "computed"

    def get_ar_data(self):
        """Returns a list of AR data
        """
        for obj in self.get_objects():
            info = self.get_base_info(obj)
            info.update({
                "analyses": self.get_analysis_data_for(obj),
                "sampletype": self.get_base_info(obj.getSampleType()),
                "number_of_partitions": self.get_number_of_partitions_for(obj),
                "template": self.get_template_data_for(obj),
            })
            yield info

    def get_sampletype_data(self):
        """Returns a list of SampleType data
        """
        for obj in self.get_sampletypes():
            info = self.get_base_info(obj)
            yield info

    def get_container_data(self):
        """Returns a list of Container data
        """
        for obj in self.get_containers():
            info = self.get_base_info(obj)
            yield info

    def get_preservation_data(self):
        """Returns a list of Preservation data
        """
        for obj in self.get_preservations():
            info = self.get_base_info(obj)
            yield info

    def get_objects(self):
        """Returns a list of objects coming from the "uids" request parameter
        """
        # get the UIDs from the request
        uids = self.request.form.get("uids", "")
        if not uids:
            # check for the `items` parameter
            uids = self.request.form.get("items", "")
        if isinstance(uids, basestring):
            uids = uids.split(",")
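        # remove duplicates while keeping the original selection order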
        unique_uids = OrderedDict().fromkeys(uids).keys()
        return filter(None, map(self.get_object_by_uid, unique_uids))

    def get_sampletypes(self):
        """Returns the available SampleTypes of the system
        """
        query = {
            "portal_type": "SampleType",
            "sort_on": "sortable_title",
            "sort_order": "ascending",
            "inactive_state": "active",
        }
        results = api.search(query, "bika_setup_catalog")
        return map(api.get_object, results)

    def get_containers(self):
        """Returns the available Containers of the system
        """
        query = dict(portal_type="Container",
                     sort_on="sortable_title",
                     sort_order="ascending",
                     inactive_state="active")
        results = api.search(query, "bika_setup_catalog")
        return map(api.get_object, results)

    def get_preservations(self):
        """Returns the available Preservations of the system
        """
        query = dict(portal_type="Preservation",
                     sort_on="sortable_title",
                     sort_order="ascending",
                     inactive_state="active")
        results = api.search(query, "bika_setup_catalog")
        return map(api.get_object, results)

    @returns_super_model
    def to_super_model(self, obj_or_objs):
        """Returns a SuperModel for a given object or a list of Supermodels if
        a list of objects was passed in
        """
        return obj_or_objs

    def get_analysis_data_for(self, ar):
        """Return the Analysis data for this AR
        """
        # Exclude analyses from children (partitions)
        analyses = ar.objectValues("Analysis")
        out = []
        for an in analyses:
            info = self.get_base_info(an)
            info.update({
                "service_uid": an.getServiceUID(),
            })
            out.append(info)
        return out

    def get_template_data_for(self, ar):
        """Return the Template data for this AR
        """
        info = None
        template = ar.getTemplate()
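        # defaults taken from the AR, used when a template partition does not
        # define its own sample type, container or preservation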
        ar_sampletype_uid = api.get_uid(ar.getSampleType())
        ar_container_uid = ""
        if ar.getContainer():
            ar_container_uid = api.get_uid(ar.getContainer())
        ar_preservation_uid = ""
        if ar.getPreservation():
            ar_preservation_uid = api.get_uid(ar.getPreservation())

        if template:
            info = self.get_base_info(template)
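
            # group the uids of the template's analysis services by the
            # partition they are assigned to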
            analyses = template.getAnalyses()
            partition_analyses = map(
                lambda x: (x.get("partition"), x.get("service_uid")), analyses)

            analyses_by_partition = defaultdict(list)
            for partition, service_uid in partition_analyses:
                analyses_by_partition[partition].append(service_uid)

            sampletypes_by_partition = defaultdict(list)
            containers_by_partition = defaultdict(list)
            preservations_by_partition = defaultdict(list)
            for part in template.getPartitions():
                part_id = part.get("part_id")
                sampletype_uid = part.get("sampletype_uid", ar_sampletype_uid)
                sampletypes_by_partition[part_id] = sampletype_uid
                container_uid = part.get("container_uid", ar_container_uid)
                containers_by_partition[part_id] = container_uid
                preserv_uid = part.get("preservation_uid", ar_preservation_uid)
                preservations_by_partition[part_id] = preserv_uid

            partitions = map(lambda p: p.get("part_id"),
                             template.getPartitions())
            info.update({
                "analyses": analyses_by_partition,
                "partitions": partitions,
                "sample_types": sampletypes_by_partition,
                "containers": containers_by_partition,
                "preservations": preservations_by_partition,
            })
        else:
            info = {
                "analyses": {},
                "partitions": [],
                "sample_types": {},
                "containers": {},
                "preservations": {},
            }
        return info

    def get_number_of_partitions_for(self, ar):
        """Return the number of selected partitions
        """
        # fetch the number of partitions from the request
        uid = api.get_uid(ar)
        num = self.request.get("primary", {}).get(uid)

        if num is None:
            # get the number of partitions from the template
            template = ar.getTemplate()
            if template:
                num = len(template.getPartitions())
            else:
                num = DEFAULT_NUMBER_OF_PARTITIONS
        try:
            num = int(num)
        except (TypeError, ValueError):
            num = DEFAULT_NUMBER_OF_PARTITIONS
        return num

    def get_base_info(self, obj):
        """Extract the base info from the given object
        """
        obj = api.get_object(obj)
        review_state = api.get_workflow_status_of(obj)
        state_title = review_state.capitalize().replace("_", " ")
        return {
            "obj": obj,
            "id": api.get_id(obj),
            "uid": api.get_uid(obj),
            "title": api.get_title(obj),
            "path": api.get_path(obj),
            "url": api.get_url(obj),
            "review_state": review_state,
            "state_title": state_title,
        }

    def redirect(self, redirect_url=None, message=None, level="info"):
        """Redirect with a message
        """
        if redirect_url is None:
            redirect_url = self.back_url
        if message is not None:
            self.add_status_message(message, level)
        return self.request.response.redirect(redirect_url)

    def get_object_by_uid(self, uid):
        """Get the object by UID
        """
        logger.debug("get_object_by_uid::UID={}".format(uid))
        obj = api.get_object_by_uid(uid, None)
        if obj is None:
            logger.warn("!! No object found for UID #{} !!".format(uid))
        return obj

    def add_status_message(self, message, level="info"):
        """Set a portal status message
        """
        return self.context.plone_utils.addPortalMessage(message, level)