# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE
#
# Copyright 2018 by its authors.
# Some rights reserved. See LICENSE.rst, CONTRIBUTORS.rst.

import itertools
import re

import transaction
from DateTime import DateTime
from Products.ATContentTypes.utils import DT2dt
from bika.lims import api
from bika.lims import logger
from bika.lims.alphanumber import Alphanumber
from bika.lims.alphanumber import to_alpha
from bika.lims.browser.fields.uidreferencefield import \
    get_backreferences as get_backuidreferences
from bika.lims.interfaces import IIdServer, IAnalysisRequestPartition
from bika.lims.numbergenerator import INumberGenerator
from zope.component import getAdapters
from zope.component import getUtility


def get_objects_in_sequence(brain_or_object, ctype, cref):
    """Return a list of items in the sequence for the given counter type and
    counter reference
    """
    obj = api.get_object(brain_or_object)
    if ctype == "backreference":
        return get_backreferences(obj, cref)
    if ctype == "contained":
        return get_contained_items(obj, cref)
    raise ValueError("Reference value is mandatory for sequence type counter")


def get_backreferences(obj, relationship):
    """Returns the backreferences for the given relationship
    """
    refs = get_backuidreferences(obj, relationship)

    # TODO remove after all ReferenceFields get ported to UIDReferenceField
    # At this moment, there are still some content types that are using the
    # ReferenceField, so we need to fall back to the traditional
    # getBackReferences for these cases.
    if not refs:
        refs = obj.getBackReferences(relationship)

    return refs


def get_contained_items(obj, spec):
    """Returns a list of (id, subobject) tuples of the current context.
    If 'spec' is specified, returns only objects whose meta_type matches 'spec'
    """
    return obj.objectItems(spec)


def get_type_id(context, **kw):
    """Returns the type id for the context passed in
    """
    portal_type = kw.get("portal_type", None)
    if portal_type:
        return portal_type

    if IAnalysisRequestPartition.providedBy(context):
        return "AnalysisRequestPartition"

    return api.get_portal_type(context)


def get_config(context, **kw):
    """Fetch the config dict from the Bika Setup for the given portal_type
    """
    # get the ID formatting config
    config_map = api.get_bika_setup().getIDFormatting()

    # allow portal_type override
    portal_type = get_type_id(context, **kw)

    # check if we have a config for the given portal_type
    for config in config_map:
        if config['portal_type'].lower() == portal_type.lower():
            return config

    # return a default config
    default_config = {
        'form': '%s-{seq}' % portal_type.lower(),
        'sequence_type': 'generated',
        'prefix': '%s' % portal_type.lower(),
    }
    return default_config
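
# Example (illustrative, assumed setup state): for a portal_type such as
# "Batch" with no entry in the ID formatting setup, get_config() falls back
# to the default above, i.e.:
#
#   {"form": "batch-{seq}", "sequence_type": "generated", "prefix": "batch"}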


def get_variables(context, **kw):
    """Prepares a dictionary of key->value pairs usable for ID formatting
    """
    # allow portal_type override
    portal_type = get_type_id(context, **kw)

    # The variables map holds the values that might get into the constructed id
    variables = {
        'context': context,
        'id': api.get_id(context),
        'portal_type': portal_type,
        'year': get_current_year(),
        'parent': api.get_parent(context),
        'seq': 0,
        'alpha': Alphanumber(0),
    }

    # Augment the variables map depending on the portal type
    if portal_type in ["AnalysisRequest", "AnalysisRequestPartition"]:
        now = DateTime()
        sampling_date = context.getSamplingDate()
        sampling_date = sampling_date and DT2dt(sampling_date) or DT2dt(now)
        date_sampled = context.getDateSampled()
        date_sampled = date_sampled and DT2dt(date_sampled) or DT2dt(now)
        variables.update({
            'clientId': context.getClientID(),
            'dateSampled': date_sampled,
            'samplingDate': sampling_date,
            'sampleType': context.getSampleType().getPrefix()
        })
        if portal_type == "AnalysisRequestPartition":
            parent_ar = context.getParentAnalysisRequest()
            variables.update({
                "parent_analysisrequest": parent_ar,
                "parent_ar_id": api.get_id(parent_ar)
            })

    elif portal_type == "ARReport":
        variables.update({
            'clientId': context.aq_parent.getClientID(),
        })

    return variables
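
# Illustrative note: for an AnalysisRequest the map above exposes e.g. {id},
# {parent}, {year}, {seq}, {alpha}, {clientId}, {sampleType}, {dateSampled}
# and {samplingDate} as placeholders for the configured ID template.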


def split(string, separator="-"):
    """Split a string on the given separator
    """
    if not isinstance(string, basestring):
        return []
    return string.split(separator)
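
# Example (illustrative): split("WS-{seq:03d}") -> ["WS", "{seq:03d}"], while
# any non-string value, e.g. split(None), returns an empty list.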


def to_int(thing, default=0):
    """Convert a thing to an integer
    """
    try:
        return int(thing)
    except (TypeError, ValueError):
        return default
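
# Example (illustrative): to_int("042") -> 42, to_int(None) -> 0 and
# to_int("abc", default=1) -> 1.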


def slice(string, separator="-", start=None, end=None):
    """Slice out a segment of a string, which is split on both the wildcards
    and the separator passed in, if any
    """
    # split by wildcards/keywords first
    # AR-{sampleType}-{parentId}{alpha:3a2d}
    segments = filter(None, re.split(r'(\{.+?\})', string))
    # ['AR-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']
    if separator:
        # Keep track of singleton separators as empties
        # We need to do this to prevent duplicates later, when splitting
        segments = map(lambda seg: seg != separator and seg or "", segments)
        # ['AR-', '{sampleType}', '', '{parentId}', '{alpha:3a2d}']
        # Split each segment at the given separator
        segments = map(lambda seg: split(seg, separator), segments)
        # [['AR', ''], ['{sampleType}'], [''], ['{parentId}'], ['{alpha:3a2d}']]
        # Flatten the list
        segments = list(itertools.chain.from_iterable(segments))
        # ['AR', '', '{sampleType}', '', '{parentId}', '{alpha:3a2d}']
        # And replace empties with separator
        segments = map(lambda seg: seg != "" and seg or separator, segments)
        # ['AR', '-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']

    # Get the start and end positions from the segments without separator
    cleaned_segments = filter(lambda seg: seg != separator, segments)
    start_pos = to_int(start, 0)
    # Note "end" is not a position, but the number of elements to join!
    end_pos = to_int(end, len(cleaned_segments) - start_pos) + start_pos - 1

    # Map the positions against the segments with separator
    start = segments.index(cleaned_segments[start_pos])
    end = segments.index(cleaned_segments[end_pos]) + 1

    # Return all segments joined
    sliced_parts = segments[start:end]
    return "".join(sliced_parts)


def get_current_year():
    """Returns the current year as a two digit string
    """
    return DateTime().strftime("%Y")[2:]


def search_by_prefix(portal_type, prefix):
    """Returns brains which share the same portal_type and ID prefix
    """
    catalog = api.get_tool("uid_catalog")
    brains = catalog({"portal_type": portal_type})
    # Filter brains with the same ID prefix
    return filter(lambda brain: api.get_id(brain).startswith(prefix), brains)


def get_ids_with_prefix(portal_type, prefix):
    """Return a list of ids sharing the same portal type and prefix
    """
    brains = search_by_prefix(portal_type, prefix)
    ids = map(api.get_id, brains)
    return ids
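
# Example (illustrative, the IDs are hypothetical):
# get_ids_with_prefix("AnalysisRequest", "WB-") might return
# ["WB-0001", "WB-0002", "WB-0003"] if three objects with that prefix are
# already cataloged.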


def make_storage_key(portal_type, prefix=None):
    """Make a storage (dict-) key for the number generator
    """
    key = portal_type.lower()
    if prefix:
        key = "{}-{}".format(key, prefix)
    return key
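
# Example (illustrative): make_storage_key("AnalysisRequest", "WB") returns
# "analysisrequest-WB", while make_storage_key("Worksheet") returns
# "worksheet".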


def get_seq_number_from_id(id, id_template, prefix, **kw):
    """Return the sequence number of the given ID
    """
    separator = kw.get("separator", "-")
    postfix = id.replace(prefix, "").strip(separator)
    postfix_segments = postfix.split(separator)
    seq_number = 0
    possible_seq_nums = filter(lambda n: n.isalnum(), postfix_segments)
    if possible_seq_nums:
        seq_number = possible_seq_nums[-1]

    # Check if this id has to be expressed as an alphanumeric number
    seq_number = get_alpha_or_number(seq_number, id_template)
    seq_number = to_int(seq_number)
    return seq_number
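
# Example (illustrative, assumed template and prefix):
#
#   get_seq_number_from_id("WB-0017", "{sampleType}-{seq:04d}", "WB")
#
# strips the prefix, picks the trailing alphanumeric segment "0017" and
# returns the integer 17.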


def get_alpha_or_number(number, template):
    """Returns an Alphanumber that represents the number passed in, expressed
    as defined in the template. Otherwise, returns the number
    """
    match = re.match(r".*\{alpha:(\d+a\d+d)\}$", template.strip())
    if match and match.groups():
        format = match.groups()[0]
        return to_alpha(number, format)
    return number
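
# Example (illustrative): with the template "WB-{seq:04d}" the number is
# returned unchanged, whereas a template ending in "{alpha:3a2d}" delegates
# to to_alpha(number, "3a2d") and yields an Alphanumber.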


def get_counted_number(context, config, variables, **kw):
    """Compute the number for the sequence type "Counter"
    """
    # This "context" is defined by the user in Bika Setup and can actually be
    # anything. However, we assume it is something like "sample" or similar
    ctx = config.get("context")

    # get object behind the context name (falls back to the current context)
    obj = variables.get(ctx, context)

    # get the counter type, which is either "backreference" or "contained"
    counter_type = config.get("counter_type")

    # the counter reference is either the "relationship" for
    # "backreference" or the meta type for contained objects
    counter_reference = config.get("counter_reference")

    # This should be a list of existing items, including the current context
    # object
    seq_items = get_objects_in_sequence(obj, counter_type, counter_reference)

    number = len(seq_items)
    return number
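
# Example (illustrative, the config values are hypothetical): a setup entry
# like {"context": "parent", "counter_type": "contained",
# "counter_reference": "AnalysisRequest"} would count the AnalysisRequest
# objects contained in the parent object and use that count as the number.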


def get_generated_number(context, config, variables, **kw):
    """Generate a new persistent number with the number generator for the
    sequence type "Generated"
    """
    # separator where to split the ID
    separator = kw.get('separator', '-')

    # allow portal_type override
    portal_type = get_type_id(context, **kw)

    # The ID format for string interpolation, e.g. WS-{seq:03d}
    id_template = config.get("form", "")

    # The split length defines where the variable part of the ID template begins
    split_length = config.get("split_length", 1)

    # The prefix template is the static part of the ID
    prefix_template = slice(id_template, separator=separator, end=split_length)

    # get the number generator
    number_generator = getUtility(INumberGenerator)

    # generate the key for the number generator storage
    prefix = prefix_template.format(**variables)

    # normalize out any unicode characters like Ö, É, etc. from the prefix
    prefix = api.normalize_filename(prefix)

    # The key used for the storage
    key = make_storage_key(portal_type, prefix)

    # Handle flushed storage
    if key not in number_generator:
        max_num = 0
        existing = get_ids_with_prefix(portal_type, prefix)
        numbers = map(lambda id: get_seq_number_from_id(id, id_template, prefix),
                      existing)
        # figure out the highest number in the sequence
        if numbers:
            max_num = max(numbers)
        # set the number generator
        logger.info("*** SEEDING Prefix '{}' to {}".format(prefix, max_num))
        number_generator.set_number(key, max_num)

    if not kw.get("dry_run", False):
        # Generate a new number
        # NOTE Even when the number exceeds the given ID sequence format,
        # it will overflow gracefully, e.g.
        # >>> '{sampleId}-R{seq:03d}'.format(sampleId="Water", seq=999999)
        # 'Water-R999999'
        number = number_generator.generate_number(key=key)
    else:
        # => This allows us to "preview" the next generated ID in the UI
        # TODO Show the user the next generated number somewhere in the UI
        number = number_generator.get(key, 1)

    # Return an int or Alphanumber
    return get_alpha_or_number(number, id_template)
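
# Example (illustrative, assumed setup values): with an ID template of
# "{sampleType}-{year}-{seq:04d}" and a split_length of 2, the static prefix
# becomes "{sampleType}-{year}", which might interpolate to "WB-18". The
# storage key is then "analysisrequest-WB-18", so sequence numbers are
# counted per prefix under that key.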


def generateUniqueId(context, **kw):
    """Generate pretty content IDs.
    """

    # get the config for this portal type from the system setup
    config = get_config(context, **kw)

    # get the variables map for later string interpolation
    variables = get_variables(context, **kw)

    # The newly generated sequence number
    number = 0

    # get the sequence type from the global config
    sequence_type = config.get("sequence_type", "generated")

    # Sequence Type is "Counter", so we use the length of the backreferences or
    # contained objects of the evaluated "context" defined in the config
    if sequence_type == 'counter':
        number = get_counted_number(context, config, variables, **kw)

    # Sequence Type is "Generated", so the ID is constructed according to the
    # configured split length
    if sequence_type == 'generated':
        number = get_generated_number(context, config, variables, **kw)

    # store the new sequence number to the variables map for str interpolation
    if isinstance(number, Alphanumber):
        variables["alpha"] = number
    variables["seq"] = int(number)

    # The ID formatting template from user config, e.g. {sampleId}-R{seq:02d}
    id_template = config.get("form", "")

    # Interpolate the ID template
    try:
        new_id = id_template.format(**variables)
    except KeyError, e:
        logger.error('KeyError: {} not in id_template {}'.format(
            e, id_template))
        raise
    normalized_id = api.normalize_filename(new_id)
    logger.info("generateUniqueId: {}".format(normalized_id))

    return normalized_id
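
# Example (illustrative, assumed config and variables): with the template
# "{sampleType}-{year}-{seq:04d}" and a generated number of 42 for a "WB"
# sample in 2018, the interpolation above yields "WB-18-0042" before
# normalization.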


def renameAfterCreation(obj):
    """Rename the content after it was created/added
    """
    # Check if the _bika_id was already set
    bika_id = getattr(obj, "_bika_id", None)
    if bika_id is not None:
        return bika_id
    # Can't rename without a subtransaction commit when using portal_factory
    transaction.savepoint(optimistic=True)
    # The id returned should be normalized already
    new_id = None
    # Checking if an adapter exists for this content type. If yes, we will
    # get new_id from the adapter.
    for name, adapter in getAdapters((obj, ), IIdServer):
        if new_id:
            logger.warn(('More than one ID Generator Adapter found for '
                         'content type -> %s') % obj.portal_type)
        new_id = adapter.generate_id(obj.portal_type)
    if not new_id:
        new_id = generateUniqueId(obj)

    # TODO: This is a naive check just in the current folder
    # -> this should check globally for duplicate objects with the same prefix
    # N.B. a check like `search_by_prefix` each time would probably slow things
    # down too much!
    # -> A solution could be to store all IDs with a certain prefix in a storage
    parent = api.get_parent(obj)
    if new_id in parent.objectIds():
        # XXX We could do the check in a `while` loop and generate a new one.
        raise KeyError("The ID {} is already taken in the path {}".format(
            new_id, api.get_path(parent)))
    # rename the object to the new id
    parent.manage_renameObject(obj.id, new_id)

    return new_id