|
1
|
|
|
# -*- coding: utf-8 -*- |
|
2
|
|
|
# |
|
3
|
|
|
# This file is part of SENAITE.CORE |
|
4
|
|
|
# |
|
5
|
|
|
# Copyright 2018 by its authors. |
|
6
|
|
|
# Some rights reserved. See LICENSE.rst, CONTRIBUTORS.rst. |
|
7
|
|
|
|
|
8
|
|
|
import datetime |
|
9
|
|
|
import os.path |
|
10
|
|
|
import re |
|
11
|
|
|
|
|
12
|
|
|
from pkg_resources import resource_filename |
|
13
|
|
|
|
|
14
|
|
|
import transaction |
|
15
|
|
|
from bika.lims import api |
|
16
|
|
|
from bika.lims import bikaMessageFactory as _ |
|
17
|
|
|
from bika.lims import logger |
|
18
|
|
|
from bika.lims.exportimport.dataimport import SetupDataSetList as SDL |
|
19
|
|
|
from bika.lims.idserver import renameAfterCreation |
|
20
|
|
|
from bika.lims.interfaces import ISetupDataSetList |
|
21
|
|
|
from bika.lims.utils import getFromString |
|
22
|
|
|
from bika.lims.utils import t |
|
23
|
|
|
from bika.lims.utils import tmpID |
|
24
|
|
|
from bika.lims.utils import to_unicode |
|
25
|
|
|
from bika.lims.utils.analysis import create_analysis |
|
26
|
|
|
from Products.Archetypes.event import ObjectInitializedEvent |
|
27
|
|
|
from Products.CMFCore.utils import getToolByName |
|
28
|
|
|
from Products.CMFPlone.utils import _createObjectByType |
|
29
|
|
|
from Products.CMFPlone.utils import safe_unicode |
|
30
|
|
|
from zope.event import notify |
|
31
|
|
|
from zope.interface import implements |
|
32
|
|
|
|
|
33
|
|
|
|
|
34
|
|
|
def lookup(context, portal_type, **kwargs):
    """Return the first catalog object of ``portal_type`` matching ``kwargs``.

    The catalog to query is resolved through archetype_tool's catalog_map,
    falling back to portal_catalog when no specific catalog is registered
    for the type.
    """
    archetype_tool = getToolByName(context, 'archetype_tool')
    catalog_name = archetype_tool.catalog_map.get(portal_type, [None])[0]
    if not catalog_name:
        catalog_name = 'portal_catalog'
    catalog = getToolByName(context, catalog_name)
    kwargs['portal_type'] = portal_type
    # Assumes at least one match exists; raises IndexError otherwise,
    # exactly like the original implementation.
    brains = catalog(**kwargs)
    return brains[0].getObject()
|
40
|
|
|
|
|
41
|
|
|
|
|
42
|
|
|
def check_for_required_columns(name, data, required):
    """Validate that every column in ``required`` has a truthy value in ``data``.

    Raises an Exception with a translated message for the first column that
    is missing or empty.
    """
    for needed in required:
        if data.get(needed, None):
            continue
        message = _("%s has no '%s' column." % (name, needed))
        raise Exception(t(message))
|
47
|
|
|
|
|
48
|
|
|
|
|
49
|
|
|
def Float(thing):
    """Coerce ``thing`` to a float, returning 0.0 when conversion fails.

    Fix: the original caught only ValueError, so non-numeric, non-string
    inputs (e.g. None from an empty spreadsheet cell) raised an uncaught
    TypeError; both failure modes now fall back to 0.0.
    """
    try:
        return float(thing)
    except (TypeError, ValueError):
        return 0.0
|
55
|
|
|
|
|
56
|
|
|
|
|
57
|
|
|
def read_file(path):
    """Return the binary contents of ``path``.

    When ``path`` itself does not exist, each allowed extension (lower- and
    upper-case) is appended in turn and the first existing candidate is read.

    :raises IOError: when neither ``path`` nor any extension variant exists.

    Fix: file handles are now closed deterministically via ``with`` blocks;
    the original left them open for the garbage collector. The duplicated
    open/read logic is also folded into a single candidate loop.
    """
    allowed_ext = ['pdf', 'jpg', 'jpeg', 'png', 'gif', 'ods', 'odt',
                   'xlsx', 'doc', 'docx', 'xls', 'csv', 'txt']
    allowed_ext += [e.upper() for e in allowed_ext]
    candidates = [path] + ['%s.%s' % (path, e) for e in allowed_ext]
    for candidate in candidates:
        if os.path.isfile(candidate):
            with open(candidate, "rb") as f:
                return f.read()
    raise IOError("File not found: %s. Allowed extensions: %s" % (path, ','.join(allowed_ext)))
|
68
|
|
|
|
|
69
|
|
|
|
|
70
|
|
|
class SetupDataSetList(SDL):
    """Lists the setup datasets shipped with the bika.lims package."""

    implements(ISetupDataSetList)

    def __call__(self):
        # Delegate to the shared dataset lister, pinned to this project name.
        return SDL.__call__(self, projectname="bika.lims")
|
76
|
|
|
|
|
77
|
|
|
|
|
78
|
|
|
class WorksheetImporter:

    """Use this as a base, for normal tabular data sheet imports.

    Subclasses are named after the worksheet they import (underscores map
    to spaces) and override :meth:`Import`.
    """

    def __init__(self, context):
        # Context handed in at adapter-lookup time; the working context is
        # taken from `lsd` in __call__.
        self.adapter_context = context

    def __call__(self, lsd, workbook, dataset_project, dataset_name):
        """Locate this importer's sheet in ``workbook`` and run Import()."""
        self.lsd = lsd
        self.context = lsd.context
        self.workbook = workbook
        # Sheet name is derived from the subclass name, e.g.
        # Lab_Contacts -> "Lab Contacts".
        self.sheetname = self.__class__.__name__.replace("_", " ")
        self.worksheet = workbook.get_sheet_by_name(self.sheetname)
        self.dataset_project = dataset_project
        self.dataset_name = dataset_name
        if self.worksheet:
            logger.info("Loading {0}.{1}: {2}".format(
                self.dataset_project, self.dataset_name, self.sheetname))
            try:
                self.Import()
            except IOError:
                # The importer must omit the files not found inside the server filesystem (bika/lims/setupdata/test/
                # if the file is loaded from 'select existing file' or bika/lims/setupdata/uploaded if it's loaded from
                # 'Load from file') and finishes the import without errors. https://jira.bikalabs.com/browse/LIMS-1624
                warning = "Error while loading attached file from %s. The file will not be uploaded into the system."
                logger.warning(warning, self.sheetname)
                self.context.plone_utils.addPortalMessage("Error while loading some attached files. "
                                                          "The files weren't uploaded into the system.")
        else:
            logger.info("No records found: '{0}'".format(self.sheetname))

    def get_rows(self, startrow=3, worksheet=None):
        """Returns a generator for all rows in a sheet.
        Each row contains a dictionary where the key is the value of the
        first row of the sheet for each column.
        The data values are returned in utf-8 format.
        Starts to consume data from startrow
        """

        headers = []
        row_nr = 0
        worksheet = worksheet if worksheet else self.worksheet
        for row in worksheet.rows:  # .iter_rows():
            row_nr += 1
            if row_nr == 1:
                # First sheet row provides the dict keys for every data row.
                # headers = [cell.internal_value for cell in row]
                headers = [cell.value for cell in row]
                continue
            if row_nr % 1000 == 0:
                # Periodic savepoint keeps memory bounded on huge imports.
                transaction.savepoint()
            if row_nr <= startrow:
                continue
            # row = [_c(cell.internal_value).decode('utf-8') for cell in row]
            new_row = []
            for cell in row:
                value = cell.value
                if value is None:
                    value = ''
                # Python 2: normalize unicode cells to utf-8 byte strings.
                if isinstance(value, unicode):
                    value = value.encode('utf-8')
                # Strip any space, \t, \n, or \r characters from the left-hand
                # side, right-hand side, or both sides of the string
                if isinstance(value, str):
                    value = value.strip(' \t\n\r')
                new_row.append(value)
            row = dict(zip(headers, new_row))

            # parse out addresses
            for add_type in ['Physical', 'Postal', 'Billing']:
                row[add_type] = {}
                if add_type + "_Address" in row:
                    for key in ['Address', 'City', 'State', 'District', 'Zip', 'Country']:
                        row[add_type][key] = str(row.get("%s_%s" % (add_type, key), ''))

            yield row

    def get_file_data(self, filename):
        """Return the raw bytes of a dataset attachment, or None on any failure."""
        if filename:
            try:
                path = resource_filename(
                    self.dataset_project,
                    "setupdata/%s/%s" % (self.dataset_name, filename))
                file_data = open(path, "rb").read()
            except:
                # Best-effort: any lookup/IO error yields None (caller warns).
                file_data = None
        else:
            file_data = None
        return file_data

    def to_bool(self, value):
        """ Converts a sheet string value to a boolean value.
            Needed because of utf-8 conversions
        """

        # Each normalization step is best-effort: lower() for strings,
        # encode() for unicode, int() for numeric-looking values.
        try:
            value = value.lower()
        except:
            pass
        try:
            value = value.encode('utf-8')
        except:
            pass
        try:
            value = int(value)
        except:
            pass
        if value in ('true', 1):
            return True
        else:
            return False

    def to_int(self, value, default=0):
        """ Converts a value to an int. Returns default if the conversion fails.
        """
        try:
            return int(value)
        except ValueError:
            try:
                return int(default)
            except:
                # Even the default was unusable; fall back to 0.
                return 0

    def to_float(self, value, default=0):
        """ Converts a value to a float. Returns default if the conversion fails.
        """
        try:
            return float(value)
        except ValueError:
            try:
                return float(default)
            except:
                # Even the default was unusable; fall back to 0.0.
                return 0.0

    def defer(self, **kwargs):
        """Queue a cross-sheet reference to be resolved after all sheets load."""
        self.lsd.deferred.append(kwargs)

    def Import(self):
        """ Override this.
        XXX Simple generic sheet importer
        """

    def fill_addressfields(self, row, obj):
        """ Fills the address fields for the specified object if allowed:
            PhysicalAddress, PostalAddress, CountryState, BillingAddress
        """
        addresses = {}
        for add_type in ['Physical', 'Postal', 'Billing', 'CountryState']:
            addresses[add_type] = {}
            for key in ['Address', 'City', 'State', 'District', 'Zip', 'Country']:
                addresses[add_type][key.lower()] = str(row.get("%s_%s" % (add_type, key), ''))

        # When no CountryState columns were supplied, inherit from Physical.
        if addresses['CountryState']['country'] == '' \
            and addresses['CountryState']['state'] == '':
            addresses['CountryState']['country'] = addresses['Physical']['country']
            addresses['CountryState']['state'] = addresses['Physical']['state']

        # Only set the address kinds the target schema actually supports.
        if hasattr(obj, 'setPhysicalAddress'):
            obj.setPhysicalAddress(addresses['Physical'])
        if hasattr(obj, 'setPostalAddress'):
            obj.setPostalAddress(addresses['Postal'])
        if hasattr(obj, 'setCountryState'):
            obj.setCountryState(addresses['CountryState'])
        if hasattr(obj, 'setBillingAddress'):
            obj.setBillingAddress(addresses['Billing'])

    def fill_contactfields(self, row, obj):
        """ Fills the contact fields for the specified object if allowed:
            EmailAddress, Phone, Fax, BusinessPhone, BusinessFax, HomePhone,
            MobilePhone
        """
        fieldnames = ['EmailAddress',
                      'Phone',
                      'Fax',
                      'BusinessPhone',
                      'BusinessFax',
                      'HomePhone',
                      'MobilePhone',
                      ]
        schema = obj.Schema()
        fields = dict([(field.getName(), field) for field in schema.fields()])
        for fieldname in fieldnames:
            try:
                field = fields[fieldname]
            except:
                # Field not in this object's schema; only worth logging when
                # the sheet actually provided a value for it.
                if fieldname in row:
                    logger.info("Address field %s not found on %s"%(fieldname,obj))
                continue
            value = row.get(fieldname, '')
            field.set(obj, value)

    def get_object(self, catalog, portal_type, title=None, **kwargs):
        """This will return an object from the catalog.
        Logs a message and returns None if no object or multiple objects found.
        All keyword arguments are passed verbatim to the contentFilter
        """
        if not title and not kwargs:
            return None
        contentFilter = {"portal_type": portal_type}
        if title:
            contentFilter['title'] = to_unicode(title)
        contentFilter.update(kwargs)
        brains = catalog(contentFilter)
        if len(brains) > 1:
            logger.info("More than one object found for %s" % contentFilter)
            return None
        elif len(brains) == 0:
            # Analysis services may be referenced by keyword instead of title.
            if portal_type == 'AnalysisService':
                brains = catalog(portal_type=portal_type, getKeyword=title)
                if brains:
                    return brains[0].getObject()
            logger.info("No objects found for %s" % contentFilter)
            return None
        else:
            return brains[0].getObject()
|
293
|
|
|
|
|
294
|
|
|
|
|
295
|
|
|
class Sub_Groups(WorksheetImporter):
    """Imports the "Sub Groups" sheet into SubGroup setup objects."""

    def Import(self):
        container = self.context.bika_setup.bika_subgroups
        for record in self.get_rows(3):
            # Skip rows without a usable title.
            if not record.get('title'):
                continue
            subgroup = _createObjectByType("SubGroup", container, tmpID())
            subgroup.edit(title=record['title'],
                          description=record['description'],
                          SortKey=record['SortKey'])
            subgroup.unmarkCreationFlag()
            renameAfterCreation(subgroup)
            notify(ObjectInitializedEvent(subgroup))
|
308
|
|
|
|
|
309
|
|
|
|
|
310
|
|
|
class Lab_Information(WorksheetImporter):
    """Imports the "Lab Information" sheet (Field/Value pairs) into the
    singleton laboratory object."""

    def Import(self):
        laboratory = self.context.bika_setup.laboratory
        # This sheet is key/value oriented rather than one-object-per-row.
        values = {}
        for row in self.get_rows(3):
            values[row['Field']] = row['Value']

        if values['AccreditationBodyLogo']:
            # Resolve the logo path relative to the dataset package.
            path = resource_filename(
                self.dataset_project,
                "setupdata/%s/%s" % (self.dataset_name,
                                     values['AccreditationBodyLogo']))
            try:
                file_data = read_file(path)
            except Exception as msg:
                file_data = None
                # NOTE(review): msg[0] relies on Python 2 exception indexing;
                # this would raise TypeError on Python 3 — confirm runtime.
                logger.warning(msg[0] + " Error on sheet: " + self.sheetname)
        else:
            file_data = None

        laboratory.edit(
            Name=values['Name'],
            LabURL=values['LabURL'],
            Confidence=values['Confidence'],
            LaboratoryAccredited=self.to_bool(values['LaboratoryAccredited']),
            AccreditationBodyLong=values['AccreditationBodyLong'],
            AccreditationBody=values['AccreditationBody'],
            AccreditationBodyURL=values['AccreditationBodyURL'],
            Accreditation=values['Accreditation'],
            AccreditationReference=values['AccreditationReference'],
            AccreditationBodyLogo=file_data,
            TaxNumber=values['TaxNumber'],
        )
        # Contact/address columns are read from the same Field/Value mapping.
        self.fill_contactfields(values, laboratory)
        self.fill_addressfields(values, laboratory)
|
346
|
|
|
|
|
347
|
|
|
|
|
348
|
|
|
class Lab_Contacts(WorksheetImporter):
    """Imports the "Lab Contacts" sheet: creates LabContact objects, their
    Plone users, group/role memberships, and finally assigns department
    managers from the "Lab Departments" sheet.

    Fix: the "no password" warning used the format field ``{3}`` with only
    three arguments (valid indices 0-2), raising IndexError at runtime for
    any contact row without a password; it now uses ``{2}``.
    """

    def Import(self):
        folder = self.context.bika_setup.bika_labcontacts
        portal_groups = getToolByName(self.context, 'portal_groups')
        portal_registration = getToolByName(
            self.context, 'portal_registration')
        # Sheet data starts at row 3; rownum tracks it for error messages.
        rownum = 2
        for row in self.get_rows(3):
            rownum+=1
            if not row.get('Firstname',None):
                continue

            # Username already exists?
            username = row.get('Username','')
            fullname = ('%s %s' % (row['Firstname'], row.get('Surname', ''))).strip()
            if username:
                username = safe_unicode(username).encode('utf-8')
                bsc = getToolByName(self.context, 'bika_setup_catalog')
                exists = [o.getObject() for o in bsc(portal_type="LabContact") if o.getObject().getUsername()==username]
                if exists:
                    error = "Lab Contact: username '{0}' in row {1} already exists. This contact will be omitted.".format(username, str(rownum))
                    logger.error(error)
                    continue

            # Is there a signature file defined? Try to get the file first.
            signature = None
            if row.get('Signature'):
                signature = self.get_file_data(row['Signature'])
                if not signature:
                    warning = "Lab Contact: Cannot load the signature file '{0}' for user '{1}'. The contact will be created, but without a signature image".format(row['Signature'], username)
                    logger.warning(warning)

            obj = _createObjectByType("LabContact", folder, tmpID())
            obj.edit(
                title=fullname,
                Salutation=row.get('Salutation', ''),
                Firstname=row['Firstname'],
                Surname=row.get('Surname', ''),
                JobTitle=row.get('JobTitle', ''),
                Username=row.get('Username', ''),
                Signature=signature
            )
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
            self.fill_contactfields(row, obj)
            self.fill_addressfields(row, obj)

            # Department is linked later: defer until all sheets are loaded.
            if row['Department_title']:
                self.defer(src_obj=obj,
                           src_field='Department',
                           dest_catalog='bika_setup_catalog',
                           dest_query={'portal_type': 'Department',
                                       'title': row['Department_title']}
                           )

            # Create Plone user
            if not row['Username']:
                warn = "Lab Contact: No username defined for user '{0}' in row {1}. Contact created, but without access credentials.".format(fullname, str(rownum))
                logger.warning(warn)
            if not row.get('EmailAddress', ''):
                warn = "Lab Contact: No Email defined for user '{0}' in row {1}. Contact created, but without access credentials.".format(fullname, str(rownum))
                logger.warning(warn)

            if(row['Username'] and row.get('EmailAddress','')):
                username = safe_unicode(row['Username']).encode('utf-8')
                passw = row['Password']
                if not passw:
                    # BUGFIX: was '{3}' with only 3 format args -> IndexError.
                    warn = "Lab Contact: No password defined for user '{0}' in row {1}. Password established automatically to '{2}'".format(username, str(rownum), username)
                    logger.warning(warn)
                    passw = username

                try:
                    member = portal_registration.addMember(
                        username,
                        passw,
                        properties={
                            'username': username,
                            'email': row['EmailAddress'],
                            'fullname': fullname}
                    )
                except Exception as msg:
                    logger.error("Client Contact: Error adding user (%s): %s" % (msg, username))
                    continue

                groups = row.get('Groups', '')
                if not groups:
                    warn = "Lab Contact: No groups defined for user '{0}' in row {1}. Group established automatically to 'Analysts'".format(username, str(rownum))
                    logger.warning(warn)
                    groups = 'Analysts'

                group_ids = [g.strip() for g in groups.split(',')]
                # Add user to all specified groups
                for group_id in group_ids:
                    group = portal_groups.getGroupById(group_id)
                    if group:
                        group.addMember(username)
                roles = row.get('Roles', '')
                if roles:
                    role_ids = [r.strip() for r in roles.split(',')]
                    # Add user to all specified roles
                    for role_id in role_ids:
                        member._addRole(role_id)
                # If user is in LabManagers, add Owner local role on clients
                # folder
                if 'LabManager' in group_ids:
                    self.context.clients.manage_setLocalRoles(
                        username, ['Owner', ])

        # Now we have the lab contacts registered, try to assign the managers
        # to each department if required
        sheet = self.workbook.get_sheet_by_name("Lab Departments")
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, sheet):
            if row['title'] and row['LabContact_Username']:
                dept = self.get_object(bsc, "Department", row.get('title'))
                # Only fill a manager slot that is still empty.
                if dept and not dept.getManager():
                    username = safe_unicode(row['LabContact_Username']).encode('utf-8')
                    exists = [o.getObject() for o in bsc(portal_type="LabContact") if o.getObject().getUsername()==username]
                    if exists:
                        dept.setManager(exists[0].UID())
|
470
|
|
|
|
|
471
|
|
|
class Lab_Departments(WorksheetImporter):
    """Imports the "Lab Departments" sheet into Department objects and links
    each one to a manager looked up by LabContact username."""

    def Import(self):
        folder = self.context.bika_setup.bika_departments
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        lab_contacts = [o.getObject() for o in bsc(portal_type="LabContact")]
        for row in self.get_rows(3):
            if not row['title']:
                continue
            dept = _createObjectByType("Department", folder, tmpID())
            dept.edit(title=row['title'],
                      description=row.get('description', ''))
            # Resolve the manager by matching the username column against
            # the already-imported lab contacts.
            wanted = row['LabContact_Username']
            manager = next(
                (c for c in lab_contacts if c.getUsername() == wanted), None)
            if manager:
                dept.setManager(manager.UID())
            else:
                message = "Department: lookup of '%s' in LabContacts/Username failed." % row[
                    'LabContact_Username']
                logger.info(message)
            dept.unmarkCreationFlag()
            renameAfterCreation(dept)
            notify(ObjectInitializedEvent(dept))
|
496
|
|
|
|
|
497
|
|
|
|
|
498
|
|
|
class Lab_Products(WorksheetImporter):
    """Imports the "Lab Products" sheet into LabProduct objects.

    Fixes: removed the dead local ``context`` and the misleading comment
    claiming an SRTemplate is created (the object is a LabProduct).
    NOTE(review): unlike sibling importers, this one never calls
    obj.unmarkCreationFlag() — preserved as-is; confirm whether intended.
    """

    def Import(self):
        # Refer to the default folder
        folder = self.context.bika_setup.bika_labproducts
        # Iterate through the rows
        for row in self.get_rows(3):
            # Create the LabProduct object
            obj = _createObjectByType('LabProduct', folder, tmpID())
            # Apply the row values
            obj.edit(
                title=row.get('title', 'Unknown'),
                description=row.get('description', ''),
                Volume=row.get('volume', 0),
                Unit=str(row.get('unit', 0)),
                Price=str(row.get('price', 0)),
            )
            # Rename the new object
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
519
|
|
|
|
|
520
|
|
|
|
|
521
|
|
|
class Clients(WorksheetImporter):
    """Imports the "Clients" sheet into Client objects.

    Fixes: the validation error messages contained a dangling ``%s`` that
    was never interpolated; they now include the row's other identifier.
    Validation also runs before object creation, so an invalid row no
    longer leaves an orphaned temporary Client behind when it raises.
    """

    def Import(self):
        folder = self.context.clients
        for row in self.get_rows(3):
            # Validate before creating anything.
            if not row['Name']:
                message = "Client %s has no Name" % row.get('ClientID', '')
                raise Exception(message)
            if not row['ClientID']:
                message = "Client %s has no Client ID" % row.get('Name', '')
                raise Exception(message)
            obj = _createObjectByType("Client", folder, tmpID())
            obj.edit(Name=row['Name'],
                     ClientID=row['ClientID'],
                     MemberDiscountApplies=row[
                         'MemberDiscountApplies'] and True or False,
                     BulkDiscount=row['BulkDiscount'] and True or False,
                     TaxNumber=row.get('TaxNumber', ''),
                     AccountNumber=row.get('AccountNumber', '')
                     )
            self.fill_contactfields(row, obj)
            self.fill_addressfields(row, obj)
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
546
|
|
|
|
|
547
|
|
|
|
|
548
|
|
|
class Client_Contacts(WorksheetImporter):
    """Imports the "Client Contacts" sheet: creates a Contact inside the
    referenced Client, optionally defers CC-contact links, and creates a
    matching Plone user with Owner role on the client."""

    def Import(self):
        portal_groups = getToolByName(self.context, 'portal_groups')
        pc = getToolByName(self.context, 'portal_catalog')
        for row in self.get_rows(3):
            # The parent Client is located by its Name index.
            client = pc(portal_type="Client",
                        getName=row['Client_title'])
            if len(client) == 0:
                client_contact = "%(Firstname)s %(Surname)s" % row
                error = "Client invalid: '%s'. The Client Contact %s will not be uploaded."
                logger.error(error, row['Client_title'], client_contact)
                continue
            client = client[0].getObject()
            contact = _createObjectByType("Contact", client, tmpID())
            fullname = "%(Firstname)s %(Surname)s" % row
            pub_pref = [x.strip() for x in
                        row.get('PublicationPreference', '').split(",")]
            contact.edit(
                Salutation=row.get('Salutation', ''),
                Firstname=row.get('Firstname', ''),
                Surname=row.get('Surname', ''),
                Username=row['Username'],
                JobTitle=row.get('JobTitle', ''),
                Department=row.get('Department', ''),
                PublicationPreference=pub_pref,
                AttachmentsPermitted=row[
                    'AttachmentsPermitted'] and True or False,
            )
            self.fill_contactfields(row, contact)
            self.fill_addressfields(row, contact)
            contact.unmarkCreationFlag()
            renameAfterCreation(contact)
            notify(ObjectInitializedEvent(contact))
            # CC Contacts
            # Contacts referenced by full name may not exist yet; defer the
            # link until every sheet has been imported.
            if row['CCContacts']:
                names = [x.strip() for x in row['CCContacts'].split(",")]
                for _fullname in names:
                    self.defer(src_obj=contact,
                               src_field='CCContact',
                               dest_catalog='portal_catalog',
                               dest_query={'portal_type': 'Contact',
                                           'getFullname': _fullname}
                               )
            ## Create Plone user
            username = safe_unicode(row['Username']).encode('utf-8')
            password = safe_unicode(row['Password']).encode('utf-8')
            if(username):
                try:
                    member = self.context.portal_registration.addMember(
                        username,
                        password,
                        properties={
                            'username': username,
                            'email': row['EmailAddress'],
                            'fullname': fullname}
                    )
                except Exception as msg:
                    # NOTE(review): unlike Lab_Contacts, failure here does not
                    # skip the row — local roles and group membership below
                    # still run. Confirm this is intentional.
                    logger.info("Error adding user (%s): %s" % (msg, username))
                contact.aq_parent.manage_setLocalRoles(row['Username'], ['Owner', ])
                contact.reindexObject()
                # add user to Clients group
                group = portal_groups.getGroupById('Clients')
                group.addMember(username)
|
612
|
|
|
|
|
613
|
|
|
|
|
614
|
|
|
class Container_Types(WorksheetImporter):
    """Imports the "Container Types" sheet into ContainerType objects."""

    def Import(self):
        destination = self.context.bika_setup.bika_containertypes
        for record in self.get_rows(3):
            if not record['title']:
                continue
            ctype = _createObjectByType("ContainerType", destination, tmpID())
            ctype.edit(title=record['title'],
                       description=record.get('description', ''))
            ctype.unmarkCreationFlag()
            renameAfterCreation(ctype)
            notify(ObjectInitializedEvent(ctype))
|
627
|
|
|
|
|
628
|
|
|
|
|
629
|
|
|
class Preservations(WorksheetImporter):
    """Imports the "Preservations" sheet, including the retention period
    assembled from the days/hours/minutes columns."""

    def Import(self):
        destination = self.context.bika_setup.bika_preservations
        for record in self.get_rows(3):
            if not record['title']:
                continue
            preservation = _createObjectByType(
                "Preservation", destination, tmpID())
            # Empty cells count as 0 for each retention-period component.
            retention = dict(
                (unit, int(record['RetentionPeriod_' + unit] or 0))
                for unit in ('days', 'hours', 'minutes'))
            preservation.edit(title=record['title'],
                              description=record.get('description', ''),
                              RetentionPeriod=retention)
            preservation.unmarkCreationFlag()
            renameAfterCreation(preservation)
            notify(ObjectInitializedEvent(preservation))
|
649
|
|
|
|
|
650
|
|
|
|
|
651
|
|
|
class Containers(WorksheetImporter):
    """Imports the "Containers" sheet, linking each container to its
    optional ContainerType and Preservation by title."""

    def Import(self):
        destination = self.context.bika_setup.bika_containers
        setup_catalog = getToolByName(self.context, 'bika_setup_catalog')
        for record in self.get_rows(3):
            if not record['title']:
                continue
            container = _createObjectByType("Container", destination, tmpID())
            container.edit(
                title=record['title'],
                description=record.get('description', ''),
                Capacity=record.get('Capacity', 0),
                PrePreserved=self.to_bool(record['PrePreserved'])
            )
            if record['ContainerType_title']:
                ctype = self.get_object(
                    setup_catalog, 'ContainerType',
                    record.get('ContainerType_title', ''))
                if ctype:
                    container.setContainerType(ctype)
            if record['Preservation_title']:
                preservation = self.get_object(
                    setup_catalog, 'Preservation',
                    record.get('Preservation_title', ''))
                if preservation:
                    container.setPreservation(preservation)
            container.unmarkCreationFlag()
            renameAfterCreation(container)
            notify(ObjectInitializedEvent(container))
|
677
|
|
|
|
|
678
|
|
|
|
|
679
|
|
|
class Suppliers(WorksheetImporter):
    """Imports the "Suppliers" sheet into Supplier objects with their
    banking, contact and address details."""

    def Import(self):
        destination = self.context.bika_setup.bika_suppliers
        for record in self.get_rows(3):
            supplier = _createObjectByType("Supplier", destination, tmpID())
            if not record['Name']:
                # Matches the original flow: the temp object is created
                # unconditionally but only edited/renamed for named rows.
                continue
            text_fields = ('Name', 'TaxNumber', 'AccountNumber', 'BankName',
                           'BankBranch', 'SWIFTcode', 'IBN', 'NIB', 'Website')
            settings = dict((f, record.get(f, '')) for f in text_fields)
            # These two historically default to an empty mapping.
            settings['AccountType'] = record.get('AccountType', {})
            settings['AccountName'] = record.get('AccountName', {})
            supplier.edit(**settings)
            self.fill_contactfields(record, supplier)
            self.fill_addressfields(record, supplier)
            supplier.unmarkCreationFlag()
            renameAfterCreation(supplier)
            notify(ObjectInitializedEvent(supplier))
|
704
|
|
|
|
|
705
|
|
|
|
|
706
|
|
|
class Supplier_Contacts(WorksheetImporter):
    """Imports the "Supplier Contacts" sheet, creating each contact inside
    the Supplier found by name."""

    def Import(self):
        setup_catalog = getToolByName(self.context, 'bika_setup_catalog')
        for record in self.get_rows(3):
            # Both a parent supplier and a first name are mandatory.
            if not record['Supplier_Name'] or not record['Firstname']:
                continue
            matches = setup_catalog(portal_type="Supplier",
                                    Title=record['Supplier_Name'])
            if not matches:
                continue
            supplier = matches[0].getObject()
            contact = _createObjectByType("SupplierContact", supplier, tmpID())
            contact.edit(
                Firstname=record['Firstname'],
                Surname=record.get('Surname', ''),
                Username=record.get('Username')
            )
            self.fill_contactfields(record, contact)
            self.fill_addressfields(record, contact)
            contact.unmarkCreationFlag()
            renameAfterCreation(contact)
            notify(ObjectInitializedEvent(contact))
|
731
|
|
|
|
|
732
|
|
|
|
|
733
|
|
|
class Manufacturers(WorksheetImporter):
    """Imports the "Manufacturers" sheet into Manufacturer objects."""

    def Import(self):
        destination = self.context.bika_setup.bika_manufacturers
        for record in self.get_rows(3):
            manufacturer = _createObjectByType(
                "Manufacturer", destination, tmpID())
            if not record['title']:
                # Matches the original flow: the temp object is created
                # unconditionally but only edited/renamed for titled rows.
                continue
            manufacturer.edit(
                title=record['title'],
                description=record.get('description', '')
            )
            self.fill_addressfields(record, manufacturer)
            manufacturer.unmarkCreationFlag()
            renameAfterCreation(manufacturer)
            notify(ObjectInitializedEvent(manufacturer))
|
748
|
|
|
|
|
749
|
|
|
|
|
750
|
|
|
class Instrument_Types(WorksheetImporter):
    """Populate bika_instrumenttypes from the 'Instrument Types' sheet."""

    def Import(self):
        container = self.context.bika_setup.bika_instrumenttypes
        for record in self.get_rows(3):
            instrument_type = _createObjectByType(
                "InstrumentType", container, tmpID())
            instrument_type.edit(
                title=record['title'],
                description=record.get('description', ''))
            instrument_type.unmarkCreationFlag()
            renameAfterCreation(instrument_type)
            notify(ObjectInitializedEvent(instrument_type))
|
762
|
|
|
|
|
763
|
|
|
|
|
764
|
|
|
class Instruments(WorksheetImporter):
    """Import instruments, link them to their type/manufacturer/supplier and
    attach optional photo, installation certificate and user manual files.
    """

    def Import(self):
        folder = self.context.bika_setup.bika_instruments
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        pc = getToolByName(self.context, 'portal_catalog')
        for row in self.get_rows(3):
            # Type, Supplier and Brand columns are mandatory
            if ('Type' not in row
                or 'Supplier' not in row
                    or 'Brand' not in row):
                logger.info("Unable to import '%s'. Missing supplier, manufacturer or type" % row.get('title',''))
                continue

            obj = _createObjectByType("Instrument", folder, tmpID())

            obj.edit(
                title=row.get('title', ''),
                AssetNumber=row.get('assetnumber', ''),
                description=row.get('description', ''),
                Type=row.get('Type', ''),
                Brand=row.get('Brand', ''),
                Model=row.get('Model', ''),
                SerialNo=row.get('SerialNo', ''),
                DataInterface=row.get('DataInterface', ''),
                Location=row.get('Location', ''),
                # NOTE: 'Instalationdate' matches the sheet's (misspelled)
                # column header -- do not "correct" without changing the sheet
                InstallationDate=row.get('Instalationdate', ''),
                UserManualID=row.get('UserManualID', ''),
            )
            # Resolve references to previously imported setup objects
            instrumenttype = self.get_object(bsc, 'InstrumentType', title=row.get('Type'))
            manufacturer = self.get_object(bsc, 'Manufacturer', title=row.get('Brand'))
            supplier = self.get_object(bsc, 'Supplier', getName=row.get('Supplier', ''))
            method = self.get_object(pc, 'Method', title=row.get('Method'))
            obj.setInstrumentType(instrumenttype)
            obj.setManufacturer(manufacturer)
            obj.setSupplier(supplier)
            if method:
                obj.setMethods([method])
                obj.setMethod(method)

            # Attaching the instrument's photo
            if row.get('Photo', None):
                path = resource_filename(
                    self.dataset_project,
                    "setupdata/%s/%s" % (self.dataset_name,
                                         row['Photo'])
                )
                try:
                    file_data = read_file(path)
                    obj.setPhoto(file_data)
                except Exception as msg:
                    file_data = None
                    logger.warning(msg[0] + " Error on sheet: " + self.sheetname)

            # Attaching the Installation Certificate if exists
            if row.get('InstalationCertificate', None):
                path = resource_filename(
                    self.dataset_project,
                    "setupdata/%s/%s" % (self.dataset_name,
                                         row['InstalationCertificate'])
                )
                try:
                    file_data = read_file(path)
                    obj.setInstallationCertificate(file_data)
                except Exception as msg:
                    logger.warning(msg[0] + " Error on sheet: " + self.sheetname)

            # Attaching the Instrument's manual if exists
            if row.get('UserManualFile', None):
                # Build a synthetic row for addDocument (see module-level
                # helper): the manual is stored as a Multifile object
                row_dict = {'DocumentID': row.get('UserManualID', 'manual'),
                            'DocumentVersion': '',
                            'DocumentLocation': '',
                            'DocumentType': 'Manual',
                            'File': row.get('UserManualFile', None)
                            }
                addDocument(self, row_dict, obj)
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
842
|
|
|
|
|
843
|
|
|
|
|
844
|
|
View Code Duplication |
class Instrument_Validations(WorksheetImporter):
    """Import validation records and attach them to existing instruments."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            # A target instrument and a title are both mandatory
            if not row.get('instrument', None) or not row.get('title', None):
                continue

            folder = self.get_object(bsc, 'Instrument', row.get('instrument'))
            if folder:
                obj = _createObjectByType("InstrumentValidation", folder, tmpID())
                obj.edit(
                    title=row['title'],
                    DownFrom=row.get('downfrom', ''),
                    DownTo=row.get('downto', ''),
                    Validator=row.get('validator', ''),
                    Considerations=row.get('considerations', ''),
                    WorkPerformed=row.get('workperformed', ''),
                    Remarks=row.get('remarks', ''),
                    DateIssued=row.get('DateIssued', ''),
                    ReportID=row.get('ReportID', '')
                )
                # Getting lab contacts (active only)
                bsc = getToolByName(self.context, 'bika_setup_catalog')
                lab_contacts = [o.getObject() for o in bsc(portal_type="LabContact", is_active=True)]
                for contact in lab_contacts:
                    # Link the worker by matching the full name from the sheet
                    if contact.getFullname() == row.get('Worker', ''):
                        obj.setWorker(contact.UID())
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
875
|
|
|
|
|
876
|
|
|
|
|
877
|
|
View Code Duplication |
class Instrument_Calibrations(WorksheetImporter):
    """Import calibration records and attach them to existing instruments."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            # A target instrument and a title are both mandatory
            if not row.get('instrument', None) or not row.get('title', None):
                continue

            folder = self.get_object(bsc, 'Instrument', row.get('instrument'))
            if folder:
                obj = _createObjectByType("InstrumentCalibration", folder, tmpID())
                obj.edit(
                    title=row['title'],
                    DownFrom=row.get('downfrom', ''),
                    DownTo=row.get('downto', ''),
                    Calibrator=row.get('calibrator', ''),
                    Considerations=row.get('considerations', ''),
                    WorkPerformed=row.get('workperformed', ''),
                    Remarks=row.get('remarks', ''),
                    DateIssued=row.get('DateIssued', ''),
                    ReportID=row.get('ReportID', '')
                )
                # Getting instrument lab contacts.
                # FIX: the query used the bogus index 'nactive_state', which
                # ZCatalog silently ignores (so inactive contacts matched
                # too); use the same 'is_active' filter as
                # Instrument_Validations.
                bsc = getToolByName(self.context, 'bika_setup_catalog')
                lab_contacts = [o.getObject() for o in bsc(portal_type="LabContact", is_active=True)]
                for contact in lab_contacts:
                    # Link the worker by matching the full name from the sheet
                    if contact.getFullname() == row.get('Worker', ''):
                        obj.setWorker(contact.UID())
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
908
|
|
|
|
|
909
|
|
|
|
|
910
|
|
|
class Instrument_Certifications(WorksheetImporter):
    """Import certification records (with optional report attachment) and
    attach them to existing instruments."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            if not row['instrument'] or not row['title']:
                continue

            folder = self.get_object(bsc, 'Instrument', row.get('instrument',''))
            if folder:
                obj = _createObjectByType("InstrumentCertification", folder, tmpID())
                today = datetime.date.today()
                # Default validity window when the sheet omits the dates:
                # from today until the same day next year (dd/mm/yyyy)
                certificate_expire_date = today.strftime('%d/%m') + '/' + str(today.year+1) \
                    if row.get('validto', '') == '' else row.get('validto')
                certificate_start_date = today.strftime('%d/%m/%Y') \
                    if row.get('validfrom', '') == '' else row.get('validfrom')
                obj.edit(
                    title=row['title'],
                    AssetNumber=row.get('assetnumber', ''),
                    Date=row.get('date', ''),
                    ValidFrom=certificate_start_date,
                    ValidTo=certificate_expire_date,
                    Agency=row.get('agency', ''),
                    Remarks=row.get('remarks', ''),
                )
                # Attaching the Report Certificate if exists
                if row.get('report', None):
                    path = resource_filename(
                        self.dataset_project,
                        "setupdata/%s/%s" % (self.dataset_name,
                                             row['report'])
                    )
                    try:
                        file_data = read_file(path)
                        obj.setDocument(file_data)
                    except Exception as msg:
                        file_data = None
                        logger.warning(msg[0] + " Error on sheet: " + self.sheetname)

                # Getting lab contacts.
                # FIX: the query used the bogus index 'nactive_state', which
                # ZCatalog silently ignores; use the same 'is_active' filter
                # as Instrument_Validations.
                bsc = getToolByName(self.context, 'bika_setup_catalog')
                lab_contacts = [o.getObject() for o in bsc(portal_type="LabContact", is_active=True)]
                for contact in lab_contacts:
                    # Match preparator/validator by full name from the sheet
                    if contact.getFullname() == row.get('preparedby', ''):
                        obj.setPreparator(contact.UID())
                    if contact.getFullname() == row.get('approvedby', ''):
                        obj.setValidator(contact.UID())
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
960
|
|
|
|
|
961
|
|
|
|
|
962
|
|
|
class Instrument_Documents(WorksheetImporter):
    """Import multifile documents and attach them to existing instruments."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            # The target instrument column is mandatory
            if not row.get('instrument', ''):
                continue
            folder = self.get_object(bsc, 'Instrument', row.get('instrument', ''))
            # addDocument tolerates an unresolved instrument (folder may be
            # None), in which case the row is ignored
            addDocument(self, row, folder)
|
971
|
|
|
|
|
972
|
|
|
def addDocument(self, row_dict, folder):
    """
    This function adds a multifile object to the instrument folder.

    :param self: the WorksheetImporter instance (module-level helper, so the
                 importer is passed explicitly as the first argument)
    :param row_dict: the dictionary which contains the document information
    :param folder: the instrument object; when falsy nothing happens
    """
    if folder:
        # This content type need a file
        if row_dict.get('File', None):
            path = resource_filename(
                self.dataset_project,
                "setupdata/%s/%s" % (self.dataset_name,
                                     row_dict['File'])
            )
            try:
                file_data = read_file(path)
            except Exception as msg:
                # Unreadable file: store None and keep going
                file_data = None
                logger.warning(msg[0] + " Error on sheet: " + self.sheetname)

            # Obtain all created instrument documents content type
            catalog = getToolByName(self.context, 'bika_setup_catalog')
            documents_brains = catalog.searchResults({'portal_type': 'Multifile'})
            # If the new document has the same DocumentID as an existing
            # document, this object won't be created.
            idAlreadyInUse = False
            for item in documents_brains:
                if item.getObject().getDocumentID() == row_dict.get('DocumentID', ''):
                    warning = "The ID '%s' used for this document is already in use on instrument '%s', consequently " \
                              "the file hasn't been upload." % (row_dict.get('DocumentID', ''), row_dict.get('instrument', ''))
                    self.context.plone_utils.addPortalMessage(warning)
                    idAlreadyInUse = True
            if not idAlreadyInUse:
                obj = _createObjectByType("Multifile", folder, tmpID())
                obj.edit(
                    DocumentID=row_dict.get('DocumentID', ''),
                    DocumentVersion=row_dict.get('DocumentVersion', ''),
                    DocumentLocation=row_dict.get('DocumentLocation', ''),
                    DocumentType=row_dict.get('DocumentType', ''),
                    File=file_data
                )
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1015
|
|
|
|
|
1016
|
|
|
|
|
1017
|
|
|
class Instrument_Maintenance_Tasks(WorksheetImporter):
    """Import maintenance tasks and attach them to existing instruments."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            # instrument, title and type columns are all mandatory
            if not row['instrument'] or not row['title'] or not row['type']:
                continue

            folder = self.get_object(bsc, 'Instrument', row.get('instrument'))
            if folder:
                obj = _createObjectByType("InstrumentMaintenanceTask", folder, tmpID())
                # Normalise the cost to two decimals; fall back to the raw
                # cell value (or '0.0') for non-numeric input.
                # FIX: narrowed the bare 'except:' to the exceptions
                # %-formatting can actually raise, so unrelated errors
                # (including KeyboardInterrupt) are no longer swallowed.
                try:
                    cost = "%.2f" % (row.get('cost', 0))
                except (TypeError, ValueError):
                    cost = row.get('cost', '0.0')

                obj.edit(
                    title=row['title'],
                    description=row['description'],
                    Type=row['type'],
                    DownFrom=row.get('downfrom', ''),
                    DownTo=row.get('downto', ''),
                    # NOTE: 'maintaner' matches the sheet's (misspelled)
                    # column header -- do not "correct" without the sheet
                    Maintainer=row.get('maintaner', ''),
                    Considerations=row.get('considerations', ''),
                    WorkPerformed=row.get('workperformed', ''),
                    Remarks=row.get('remarks', ''),
                    Cost=cost,
                    Closed=self.to_bool(row.get('closed'))
                )
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1049
|
|
|
|
|
1050
|
|
|
|
|
1051
|
|
|
class Instrument_Schedule(WorksheetImporter):
    """Import scheduled tasks (with repetition criteria) for instruments."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            # instrument, title and type columns are all mandatory
            if not row['instrument'] or not row['title'] or not row['type']:
                continue
            folder = self.get_object(bsc, 'Instrument', row.get('instrument'))
            if folder:
                obj = _createObjectByType("InstrumentScheduledTask", folder, tmpID())
                # Build the schedule criteria record expected by the
                # ScheduleCriteria field: repetition is enabled when either a
                # repeat count > 1 or a non-empty 'repeatuntil' is given
                criteria = [
                    {'fromenabled': row.get('date', None) is not None,
                     'fromdate': row.get('date', ''),
                     'repeatenabled': ((row['numrepeats'] and
                                        row['numrepeats'] > 1) or
                                       (row['repeatuntil'] and
                                        len(row['repeatuntil']) > 0)),
                     'repeatunit': row.get('numrepeats', ''),
                     'repeatperiod': row.get('periodicity', ''),
                     'repeatuntilenabled': (row['repeatuntil'] and
                                            len(row['repeatuntil']) > 0),
                     'repeatuntil': row.get('repeatuntil')}
                ]
                obj.edit(
                    title=row['title'],
                    Type=row['type'],
                    ScheduleCriteria=criteria,
                    Considerations=row.get('considerations', ''),
                )
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1083
|
|
|
|
|
1084
|
|
|
|
|
1085
|
|
|
class Sample_Matrices(WorksheetImporter):
    """Populate bika_samplematrices from the 'Sample Matrices' sheet."""

    def Import(self):
        container = self.context.bika_setup.bika_samplematrices
        for record in self.get_rows(3):
            matrix_title = record['title']
            if not matrix_title:
                continue
            matrix = _createObjectByType("SampleMatrix", container, tmpID())
            matrix.edit(
                title=matrix_title,
                description=record.get('description', '')
            )
            matrix.unmarkCreationFlag()
            renameAfterCreation(matrix)
            notify(ObjectInitializedEvent(matrix))
|
1100
|
|
|
|
|
1101
|
|
|
|
|
1102
|
|
|
class Batch_Labels(WorksheetImporter):
    """Populate bika_batchlabels from the 'Batch Labels' sheet."""

    def Import(self):
        container = self.context.bika_setup.bika_batchlabels
        for record in self.get_rows(3):
            label_title = record['title']
            if not label_title:
                continue
            label = _createObjectByType("BatchLabel", container, tmpID())
            label.edit(title=label_title)
            label.unmarkCreationFlag()
            renameAfterCreation(label)
            notify(ObjectInitializedEvent(label))
|
1113
|
|
|
|
|
1114
|
|
|
|
|
1115
|
|
|
class Sample_Types(WorksheetImporter):
    """Import sample types, resolving their matrix, container type and
    default sample point against previously imported setup objects."""

    def Import(self):
        folder = self.context.bika_setup.bika_sampletypes
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            if not row['title']:
                continue
            obj = _createObjectByType("SampleType", folder, tmpID())
            samplematrix = self.get_object(bsc, 'SampleMatrix',
                                           row.get('SampleMatrix_title'))
            containertype = self.get_object(bsc, 'ContainerType',
                                            row.get('ContainerType_title'))
            # Retention period is expressed in whole days on the sheet
            retentionperiod = {
                'days': row['RetentionPeriod'] if row['RetentionPeriod'] else 0,
                'hours': 0,
                'minutes': 0}
            obj.edit(
                title=row['title'],
                description=row.get('description', ''),
                RetentionPeriod=retentionperiod,
                Hazardous=self.to_bool(row['Hazardous']),
                SampleMatrix=samplematrix,
                Prefix=row['Prefix'],
                MinimumVolume=row['MinimumVolume'],
                ContainerType=containertype
            )
            samplepoint = self.get_object(bsc, 'SamplePoint',
                                          row.get('SamplePoint_title'))
            if samplepoint:
                obj.setSamplePoints([samplepoint, ])
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
1149
|
|
|
|
|
1150
|
|
|
|
|
1151
|
|
|
class Sample_Points(WorksheetImporter):
    """Import sample points, either into a client folder (when 'Client_title'
    is given) or into the lab-wide bika_samplepoints folder."""

    def Import(self):
        setup_folder = self.context.bika_setup.bika_samplepoints
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        pc = getToolByName(self.context, 'portal_catalog')
        for row in self.get_rows(3):
            if not row['title']:
                continue
            if row['Client_title']:
                # Client-specific sample point: the client must already exist
                client_title = row['Client_title']
                client = pc(portal_type="Client", getName=client_title)
                if len(client) == 0:
                    error = "Sample Point %s: Client invalid: '%s'. The Sample point will not be uploaded."
                    logger.error(error, row['title'], client_title)
                    continue
                folder = client[0].getObject()
            else:
                folder = setup_folder

            # Latitude/Longitude columns are not imported; flag their presence
            if row['Latitude']:
                logger.log("Ignored SamplePoint Latitude", 'error')
            if row['Longitude']:
                logger.log("Ignored SamplePoint Longitude", 'error')

            obj = _createObjectByType("SamplePoint", folder, tmpID())
            obj.edit(
                title=row['title'],
                description=row.get('description', ''),
                Composite=self.to_bool(row['Composite']),
                Elevation=row['Elevation'],
            )
            sampletype = self.get_object(bsc, 'SampleType',
                                         row.get('SampleType_title'))
            if sampletype:
                obj.setSampleTypes([sampletype, ])
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
1190
|
|
|
|
|
1191
|
|
|
|
|
1192
|
|
|
class Sample_Point_Sample_Types(WorksheetImporter):
    """Link sample points and sample types to each other (both directions)
    from the 'Sample Point Sample Types' sheet."""

    def Import(self):
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            sampletype = self.get_object(bsc,
                                         'SampleType',
                                         row.get('SampleType_title'))
            samplepoint = self.get_object(bsc,
                                          'SamplePoint',
                                          row['SamplePoint_title'])
            # NOTE(review): each side is only guarded by its own 'if', so an
            # unresolved counterpart (None) can be appended to the other
            # side's reference list -- confirm this is tolerated downstream
            if samplepoint:
                sampletypes = samplepoint.getSampleTypes()
                if sampletype not in sampletypes:
                    sampletypes.append(sampletype)
                    samplepoint.setSampleTypes(sampletypes)

            if sampletype:
                samplepoints = sampletype.getSamplePoints()
                if samplepoint not in samplepoints:
                    samplepoints.append(samplepoint)
                    sampletype.setSamplePoints(samplepoints)
|
1214
|
|
|
|
|
1215
|
|
|
class Storage_Locations(WorksheetImporter):
    """Import storage locations (site/location/shelf hierarchy columns) into
    the bika_storagelocations folder."""

    def Import(self):
        setup_folder = self.context.bika_setup.bika_storagelocations
        # FIX: dropped the unused 'bika_setup_catalog' / 'portal_catalog'
        # lookups -- neither catalog was referenced anywhere in the loop.
        for row in self.get_rows(3):
            # The Address column doubles as the object title and is mandatory
            if not row['Address']:
                continue

            obj = _createObjectByType("StorageLocation", setup_folder, tmpID())
            obj.edit(
                title=row['Address'],
                SiteTitle=row['SiteTitle'],
                SiteCode=row['SiteCode'],
                SiteDescription=row['SiteDescription'],
                LocationTitle=row['LocationTitle'],
                LocationCode=row['LocationCode'],
                LocationDescription=row['LocationDescription'],
                LocationType=row['LocationType'],
                ShelfTitle=row['ShelfTitle'],
                ShelfCode=row['ShelfCode'],
                ShelfDescription=row['ShelfDescription'],
            )
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
1242
|
|
|
|
|
1243
|
|
|
|
|
1244
|
|
|
class Sample_Conditions(WorksheetImporter):
    """Populate bika_sampleconditions from the 'Sample Conditions' sheet."""

    def Import(self):
        container = self.context.bika_setup.bika_sampleconditions
        for record in self.get_rows(3):
            condition_title = record['Title']
            if not condition_title:
                continue
            condition = _createObjectByType(
                "SampleCondition", container, tmpID())
            condition.edit(
                title=condition_title,
                description=record.get('Description', '')
            )
            condition.unmarkCreationFlag()
            renameAfterCreation(condition)
            notify(ObjectInitializedEvent(condition))
|
1258
|
|
|
|
|
1259
|
|
|
|
|
1260
|
|
|
class Analysis_Categories(WorksheetImporter):
    """Import analysis categories; each category requires both a title and an
    existing Department, otherwise the row is logged and skipped."""

    def Import(self):
        folder = self.context.bika_setup.bika_analysiscategories
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            department = None
            if row.get('Department_title', None):
                department = self.get_object(bsc, 'Department',
                                             row.get('Department_title'))
            if row.get('title', None) and department:
                obj = _createObjectByType("AnalysisCategory", folder, tmpID())
                obj.edit(
                    title=row['title'],
                    description=row.get('description', ''))
                obj.setDepartment(department)
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
            # FIX: corrected the log messages -- doubled word in
            # "Error in in", and a missing space before "is wrong."
            elif not row.get('title', None):
                logger.warning("Error in " + self.sheetname + ". Missing Title field")
            elif not row.get('Department_title', None):
                logger.warning("Error in " + self.sheetname + ". Department field missing.")
            else:
                # Department column present but the lookup failed
                logger.warning("Error in " + self.sheetname + ". Department "
                               + row.get('Department_title') + " is wrong.")
|
1286
|
|
|
|
|
1287
|
|
|
|
|
1288
|
|
|
class Methods(WorksheetImporter):
    """Import analysis methods, ensuring MethodID uniqueness and attaching an
    optional method document file."""

    def Import(self):
        folder = self.context.methods
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            if row['title']:
                calculation = self.get_object(bsc, 'Calculation', row.get('Calculation_title'))
                obj = _createObjectByType("Method", folder, tmpID())
                obj.edit(
                    title=row['title'],
                    description=row.get('description', ''),
                    Instructions=row.get('Instructions', ''),
                    ManualEntryOfResults=row.get('ManualEntryOfResults', True),
                    Calculation=calculation,
                    MethodID=row.get('MethodID', ''),
                    Accredited=row.get('Accredited', True),
                )
                # If another method already carries the same MethodID, blank
                # this one out so IDs stay unique.
                # FIX: the original loop called 'methods.getObject.get(...)'
                # (missing call parentheses -> AttributeError at runtime) and
                # also compared the freshly created object against itself,
                # which would always match once it was catalogued.
                method_id = row.get('MethodID', '')
                if method_id:
                    catalog = getToolByName(self.context, 'portal_catalog')
                    for brain in catalog.searchResults({'portal_type': 'Method'}):
                        other = brain.getObject()
                        if other == obj:
                            continue
                        if other.get('MethodID', '') == method_id:
                            obj.edit(MethodID='')
                            break

                if row['MethodDocument']:
                    path = resource_filename(
                        self.dataset_project,
                        "setupdata/%s/%s" % (self.dataset_name,
                                             row['MethodDocument'])
                    )
                    try:
                        file_data = read_file(path)
                        obj.setMethodDocument(file_data)
                    except Exception as msg:
                        logger.warning(msg[0] + " Error on sheet: " + self.sheetname)

                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1329
|
|
|
|
|
1330
|
|
|
|
|
1331
|
|
|
class Sampling_Deviations(WorksheetImporter):
    """Populate bika_samplingdeviations from the 'Sampling Deviations' sheet."""

    def Import(self):
        container = self.context.bika_setup.bika_samplingdeviations
        for record in self.get_rows(3):
            deviation_title = record['title']
            if not deviation_title:
                continue
            deviation = _createObjectByType(
                "SamplingDeviation", container, tmpID())
            deviation.edit(
                title=deviation_title,
                description=record.get('description', '')
            )
            deviation.unmarkCreationFlag()
            renameAfterCreation(deviation)
            notify(ObjectInitializedEvent(deviation))
|
1345
|
|
|
|
|
1346
|
|
|
|
|
1347
|
|
|
class Calculations(WorksheetImporter):
    """Import calculations with their interim fields, defer linking of the
    analysis services referenced in each formula, and finally assign default
    calculations to methods from the 'Methods' sheet."""

    def get_interim_fields(self):
        # preload Calculation Interim Fields sheet
        sheetname = 'Calculation Interim Fields'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        if not worksheet:
            return
        # Map calculation title -> list of interim-field dicts, consumed by
        # Import() below
        self.interim_fields = {}
        rows = self.get_rows(3, worksheet=worksheet)
        for row in rows:
            calc_title = row['Calculation_title']
            if calc_title not in self.interim_fields.keys():
                self.interim_fields[calc_title] = []
            self.interim_fields[calc_title].append({
                'keyword': row['keyword'],
                'title': row.get('title', ''),
                'type': 'int',
                'hidden': ('hidden' in row and row['hidden']) and True or False,
                'value': row['value'],
                'unit': row['unit'] and row['unit'] or ''})

    def Import(self):
        self.get_interim_fields()
        folder = self.context.bika_setup.bika_calculations
        for row in self.get_rows(3):
            if not row['title']:
                continue
            calc_title = row['title']
            calc_interims = self.interim_fields.get(calc_title, [])
            formula = row['Formula']
            # scan formula for dep services: keywords appear as [Keyword]
            keywords = re.compile(r"\[([^\.^\]]+)\]").findall(formula)
            # remove interims from deps -- interim fields are not services
            interim_keys = [k['keyword'] for k in calc_interims]
            dep_keywords = [k for k in keywords if k not in interim_keys]

            obj = _createObjectByType("Calculation", folder, tmpID())
            obj.edit(
                title=calc_title,
                description=row.get('description', ''),
                InterimFields=calc_interims,
                Formula=str(row['Formula'])
            )
            # Services may not be imported yet: defer the reference wiring
            # until the target objects exist
            for kw in dep_keywords:
                self.defer(src_obj=obj,
                           src_field='DependentServices',
                           dest_catalog='bika_setup_catalog',
                           dest_query={'portal_type': 'AnalysisService',
                                       'getKeyword': kw}
                           )
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))

        # Now we have the calculations registered, try to assign default calcs
        # to methods
        sheet = self.workbook.get_sheet_by_name("Methods")
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, sheet):
            if row.get('title', '') and row.get('Calculation_title', ''):
                meth = self.get_object(bsc, "Method", row.get('title'))
                # Only fill in a calculation when the method has none yet
                if meth and not meth.getCalculation():
                    calctit = safe_unicode(row['Calculation_title']).encode('utf-8')
                    calc = self.get_object(bsc, "Calculation", calctit)
                    if calc:
                        meth.setCalculation(calc.UID())
|
1414
|
|
|
|
|
1415
|
|
|
|
|
1416
|
|
|
class Analysis_Services(WorksheetImporter): |
|
1417
|
|
|
|
|
1418
|
|
|
def load_interim_fields(self):
    # preload AnalysisService InterimFields sheet
    sheetname = 'AnalysisService InterimFields'
    worksheet = self.workbook.get_sheet_by_name(sheetname)
    if not worksheet:
        return
    # Map service title -> list of interim-field dicts, consumed later when
    # the services themselves are imported
    self.service_interims = {}
    rows = self.get_rows(3, worksheet=worksheet)
    for row in rows:
        service_title = row['Service_title']
        if service_title not in self.service_interims.keys():
            self.service_interims[service_title] = []
        self.service_interims[service_title].append({
            'keyword': row['keyword'],
            'title': row.get('title', ''),
            'type': 'int',
            'value': row['value'],
            'unit': row['unit'] and row['unit'] or ''})
|
1436
|
|
|
|
|
1437
|
|
|
def load_result_options(self):
    """Append result options from the 'AnalysisService ResultOptions' sheet
    to the matching, already-imported analysis services."""
    bsc = getToolByName(self.context, 'bika_setup_catalog')
    sheetname = 'AnalysisService ResultOptions'
    worksheet = self.workbook.get_sheet_by_name(sheetname)
    if not worksheet:
        return
    for row in self.get_rows(3, worksheet=worksheet):
        service = self.get_object(bsc, 'AnalysisService',
                                  row.get('Service_title'))
        if not service:
            # FIX: the original 'return' aborted the whole sheet on the
            # first unresolvable service; warn and keep processing the
            # remaining rows (same pattern as load_service_uncertainties).
            logger.warning(
                "Unable to load result options. Service '%s' not found."
                % row.get('Service_title'))
            continue
        sro = service.getResultOptions()
        sro.append({'ResultValue': row['ResultValue'],
                    'ResultText': row['ResultText']})
        service.setResultOptions(sro)
|
1452
|
|
|
|
|
1453
|
|
|
def load_service_uncertainties(self): |
|
1454
|
|
|
bsc = getToolByName(self.context, 'bika_setup_catalog') |
|
1455
|
|
|
sheetname = 'AnalysisService Uncertainties' |
|
1456
|
|
|
worksheet = self.workbook.get_sheet_by_name(sheetname) |
|
1457
|
|
|
if not worksheet: |
|
1458
|
|
|
return |
|
1459
|
|
|
|
|
1460
|
|
|
bucket = {} |
|
1461
|
|
|
count = 0 |
|
1462
|
|
|
for row in self.get_rows(3, worksheet=worksheet): |
|
1463
|
|
|
count += 1 |
|
1464
|
|
|
service = self.get_object(bsc, 'AnalysisService', |
|
1465
|
|
|
row.get('Service_title')) |
|
1466
|
|
|
if not service: |
|
1467
|
|
|
warning = "Unable to load an Analysis Service uncertainty. Service '%s' not found." % row.get('Service_title') |
|
1468
|
|
|
logger.warning(warning) |
|
1469
|
|
|
continue |
|
1470
|
|
|
service_uid = service.UID() |
|
1471
|
|
|
if service_uid not in bucket: |
|
1472
|
|
|
bucket[service_uid] = [] |
|
1473
|
|
|
bucket[service_uid].append( |
|
1474
|
|
|
{'intercept_min': row['Range Min'], |
|
1475
|
|
|
'intercept_max': row['Range Max'], |
|
1476
|
|
|
'errorvalue': row['Uncertainty Value']} |
|
1477
|
|
|
) |
|
1478
|
|
|
if count > 500: |
|
1479
|
|
|
self.write_bucket(bucket) |
|
1480
|
|
|
bucket = {} |
|
1481
|
|
|
if bucket: |
|
1482
|
|
|
self.write_bucket(bucket) |
|
1483
|
|
|
|
|
1484
|
|
|
def get_methods(self, service_title, default_method): |
|
1485
|
|
|
""" Return an array of objects of the type Method in accordance to the |
|
1486
|
|
|
methods listed in the 'AnalysisService Methods' sheet and service |
|
1487
|
|
|
set in the parameter service_title. |
|
1488
|
|
|
If default_method is set, it will be included in the returned |
|
1489
|
|
|
array. |
|
1490
|
|
|
""" |
|
1491
|
|
|
return self.get_relations(service_title, |
|
1492
|
|
|
default_method, |
|
1493
|
|
|
'Method', |
|
1494
|
|
|
'portal_catalog', |
|
1495
|
|
|
'AnalysisService Methods', |
|
1496
|
|
|
'Method_title') |
|
1497
|
|
|
|
|
1498
|
|
|
def get_instruments(self, service_title, default_instrument): |
|
1499
|
|
|
""" Return an array of objects of the type Instrument in accordance to |
|
1500
|
|
|
the instruments listed in the 'AnalysisService Instruments' sheet |
|
1501
|
|
|
and service set in the parameter 'service_title'. |
|
1502
|
|
|
If default_instrument is set, it will be included in the returned |
|
1503
|
|
|
array. |
|
1504
|
|
|
""" |
|
1505
|
|
|
return self.get_relations(service_title, |
|
1506
|
|
|
default_instrument, |
|
1507
|
|
|
'Instrument', |
|
1508
|
|
|
'bika_setup_catalog', |
|
1509
|
|
|
'AnalysisService Instruments', |
|
1510
|
|
|
'Instrument_title') |
|
1511
|
|
|
|
|
1512
|
|
|
def get_relations(self, service_title, default_obj, obj_type, catalog_name, sheet_name, column): |
|
1513
|
|
|
""" Return an array of objects of the specified type in accordance to |
|
1514
|
|
|
the object titles defined in the sheet specified in 'sheet_name' and |
|
1515
|
|
|
service set in the paramenter 'service_title'. |
|
1516
|
|
|
If a default_obj is set, it will be included in the returned array. |
|
1517
|
|
|
""" |
|
1518
|
|
|
out_objects = [default_obj] if default_obj else [] |
|
1519
|
|
|
cat = getToolByName(self.context, catalog_name) |
|
1520
|
|
|
worksheet = self.workbook.get_sheet_by_name(sheet_name) |
|
1521
|
|
|
if not worksheet: |
|
1522
|
|
|
return out_objects |
|
1523
|
|
|
for row in self.get_rows(3, worksheet=worksheet): |
|
1524
|
|
|
row_as_title = row.get('Service_title') |
|
1525
|
|
|
if not row_as_title: |
|
1526
|
|
|
return out_objects |
|
1527
|
|
|
elif row_as_title != service_title: |
|
1528
|
|
|
continue |
|
1529
|
|
|
obj = self.get_object(cat, obj_type, row.get(column)) |
|
1530
|
|
|
if obj: |
|
1531
|
|
|
if default_obj and default_obj.UID() == obj.UID(): |
|
1532
|
|
|
continue |
|
1533
|
|
|
out_objects.append(obj) |
|
1534
|
|
|
return out_objects |
|
1535
|
|
|
|
|
1536
|
|
|
def write_bucket(self, bucket): |
|
1537
|
|
|
bsc = getToolByName(self.context, 'bika_setup_catalog') |
|
1538
|
|
|
for service_uid, uncertainties in bucket.items(): |
|
1539
|
|
|
obj = bsc(UID=service_uid)[0].getObject() |
|
1540
|
|
|
_uncert = list(obj.getUncertainties()) |
|
1541
|
|
|
_uncert.extend(uncertainties) |
|
1542
|
|
|
obj.setUncertainties(_uncert) |
|
1543
|
|
|
|
|
1544
|
|
|
def Import(self): |
|
1545
|
|
|
self.load_interim_fields() |
|
1546
|
|
|
folder = self.context.bika_setup.bika_analysisservices |
|
1547
|
|
|
bsc = getToolByName(self.context, 'bika_setup_catalog') |
|
1548
|
|
|
pc = getToolByName(self.context, 'portal_catalog') |
|
1549
|
|
|
for row in self.get_rows(3): |
|
1550
|
|
|
if not row['title']: |
|
1551
|
|
|
continue |
|
1552
|
|
|
|
|
1553
|
|
|
obj = _createObjectByType("AnalysisService", folder, tmpID()) |
|
1554
|
|
|
MTA = { |
|
1555
|
|
|
'days': self.to_int(row.get('MaxTimeAllowed_days',0),0), |
|
1556
|
|
|
'hours': self.to_int(row.get('MaxTimeAllowed_hours',0),0), |
|
1557
|
|
|
'minutes': self.to_int(row.get('MaxTimeAllowed_minutes',0),0), |
|
1558
|
|
|
} |
|
1559
|
|
|
category = self.get_object(bsc, 'AnalysisCategory', row.get('AnalysisCategory_title')) |
|
1560
|
|
|
department = self.get_object(bsc, 'Department', row.get('Department_title')) |
|
1561
|
|
|
container = self.get_object(bsc, 'Container', row.get('Container_title')) |
|
1562
|
|
|
preservation = self.get_object(bsc, 'Preservation', row.get('Preservation_title')) |
|
1563
|
|
|
|
|
1564
|
|
|
# Analysis Service - Method considerations: |
|
1565
|
|
|
# One Analysis Service can have 0 or n Methods associated (field |
|
1566
|
|
|
# 'Methods' from the Schema). |
|
1567
|
|
|
# If the Analysis Service has at least one method associated, then |
|
1568
|
|
|
# one of those methods can be set as the defualt method (field |
|
1569
|
|
|
# '_Method' from the Schema). |
|
1570
|
|
|
# |
|
1571
|
|
|
# To make it easier, if a DefaultMethod is declared in the |
|
1572
|
|
|
# Analysis_Services spreadsheet, but the same AS has no method |
|
1573
|
|
|
# associated in the Analysis_Service_Methods spreadsheet, then make |
|
1574
|
|
|
# the assumption that the DefaultMethod set in the former has to be |
|
1575
|
|
|
# associated to the AS although the relation is missing. |
|
1576
|
|
|
defaultmethod = self.get_object(pc, 'Method', row.get('DefaultMethod_title')) |
|
1577
|
|
|
methods = self.get_methods(row['title'], defaultmethod) |
|
1578
|
|
|
if not defaultmethod and methods: |
|
1579
|
|
|
defaultmethod = methods[0] |
|
1580
|
|
|
|
|
1581
|
|
|
# Analysis Service - Instrument considerations: |
|
1582
|
|
|
# By default, an Analysis Services will be associated automatically |
|
1583
|
|
|
# with several Instruments due to the Analysis Service - Methods |
|
1584
|
|
|
# relation (an Instrument can be assigned to a Method and one Method |
|
1585
|
|
|
# can have zero or n Instruments associated). There is no need to |
|
1586
|
|
|
# set this assignment directly, the AnalysisService object will |
|
1587
|
|
|
# find those instruments. |
|
1588
|
|
|
# Besides this 'automatic' behavior, an Analysis Service can also |
|
1589
|
|
|
# have 0 or n Instruments manually associated ('Instruments' field). |
|
1590
|
|
|
# In this case, the attribute 'AllowInstrumentEntryOfResults' should |
|
1591
|
|
|
# be set to True. |
|
1592
|
|
|
# |
|
1593
|
|
|
# To make it easier, if a DefaultInstrument is declared in the |
|
1594
|
|
|
# Analysis_Services spreadsheet, but the same AS has no instrument |
|
1595
|
|
|
# associated in the AnalysisService_Instruments spreadsheet, then |
|
1596
|
|
|
# make the assumption the DefaultInstrument set in the former has |
|
1597
|
|
|
# to be associated to the AS although the relation is missing and |
|
1598
|
|
|
# the option AllowInstrumentEntryOfResults will be set to True. |
|
1599
|
|
|
defaultinstrument = self.get_object(bsc, 'Instrument', row.get('DefaultInstrument_title')) |
|
1600
|
|
|
instruments = self.get_instruments(row['title'], defaultinstrument) |
|
1601
|
|
|
allowinstrentry = True if instruments else False |
|
1602
|
|
|
if not defaultinstrument and instruments: |
|
1603
|
|
|
defaultinstrument = instruments[0] |
|
1604
|
|
|
|
|
1605
|
|
|
# The manual entry of results can only be set to false if the value |
|
1606
|
|
|
# for the attribute "InstrumentEntryOfResults" is False. |
|
1607
|
|
|
allowmanualentry = True if not allowinstrentry else row.get('ManualEntryOfResults', True) |
|
1608
|
|
|
|
|
1609
|
|
|
# Analysis Service - Calculation considerations: |
|
1610
|
|
|
# By default, the AnalysisService will use the Calculation associated |
|
1611
|
|
|
# to the Default Method (the field "UseDefaultCalculation"==True). |
|
1612
|
|
|
# If the Default Method for this AS doesn't have any Calculation |
|
1613
|
|
|
# associated and the field "UseDefaultCalculation" is True, no |
|
1614
|
|
|
# Calculation will be used for this AS ("_Calculation" field is |
|
1615
|
|
|
# reserved and should not be set directly). |
|
1616
|
|
|
# |
|
1617
|
|
|
# To make it easier, if a Calculation is set by default in the |
|
1618
|
|
|
# spreadsheet, then assume the UseDefaultCalculation has to be set |
|
1619
|
|
|
# to False. |
|
1620
|
|
|
deferredcalculation = self.get_object(bsc, 'Calculation', row.get('Calculation_title')) |
|
1621
|
|
|
usedefaultcalculation = False if deferredcalculation else True |
|
1622
|
|
|
_calculation = deferredcalculation if deferredcalculation else \ |
|
1623
|
|
|
(defaultmethod.getCalculation() if defaultmethod else None) |
|
1624
|
|
|
|
|
1625
|
|
|
obj.edit( |
|
1626
|
|
|
title=row['title'], |
|
1627
|
|
|
ShortTitle=row.get('ShortTitle', row['title']), |
|
1628
|
|
|
description=row.get('description', ''), |
|
1629
|
|
|
Keyword=row['Keyword'], |
|
1630
|
|
|
PointOfCapture=row['PointOfCapture'].lower(), |
|
1631
|
|
|
Category=category, |
|
1632
|
|
|
Department=department, |
|
1633
|
|
|
AttachmentOption=row.get('Attachment', '')[0].lower() if row.get('Attachment', '') else 'p', |
|
1634
|
|
|
Unit=row['Unit'] and row['Unit'] or None, |
|
1635
|
|
|
Precision=row['Precision'] and str(row['Precision']) or '0', |
|
1636
|
|
|
ExponentialFormatPrecision=str(self.to_int(row.get('ExponentialFormatPrecision',7),7)), |
|
1637
|
|
|
LowerDetectionLimit='%06f' % self.to_float(row.get('LowerDetectionLimit', '0.0'), 0), |
|
1638
|
|
|
UpperDetectionLimit='%06f' % self.to_float(row.get('UpperDetectionLimit', '1000000000.0'), 1000000000.0), |
|
1639
|
|
|
DetectionLimitSelector=self.to_bool(row.get('DetectionLimitSelector',0)), |
|
1640
|
|
|
MaxTimeAllowed=MTA, |
|
1641
|
|
|
Price="%02f" % Float(row['Price']), |
|
1642
|
|
|
BulkPrice="%02f" % Float(row['BulkPrice']), |
|
1643
|
|
|
VAT="%02f" % Float(row['VAT']), |
|
1644
|
|
|
_Method=defaultmethod, |
|
1645
|
|
|
Methods=methods, |
|
1646
|
|
|
ManualEntryOfResults=allowmanualentry, |
|
1647
|
|
|
InstrumentEntryOfResults=allowinstrentry, |
|
1648
|
|
|
Instruments=instruments, |
|
1649
|
|
|
Calculation=_calculation, |
|
1650
|
|
|
UseDefaultCalculation=usedefaultcalculation, |
|
1651
|
|
|
DuplicateVariation="%02f" % Float(row['DuplicateVariation']), |
|
1652
|
|
|
Accredited=self.to_bool(row['Accredited']), |
|
1653
|
|
|
InterimFields=hasattr(self, 'service_interims') and self.service_interims.get( |
|
1654
|
|
|
row['title'], []) or [], |
|
1655
|
|
|
Separate=self.to_bool(row.get('Separate', False)), |
|
1656
|
|
|
Container=container, |
|
1657
|
|
|
Preservation=preservation, |
|
1658
|
|
|
CommercialID=row.get('CommercialID', ''), |
|
1659
|
|
|
ProtocolID=row.get('ProtocolID', '') |
|
1660
|
|
|
) |
|
1661
|
|
|
obj.unmarkCreationFlag() |
|
1662
|
|
|
renameAfterCreation(obj) |
|
1663
|
|
|
notify(ObjectInitializedEvent(obj)) |
|
1664
|
|
|
self.load_result_options() |
|
1665
|
|
|
self.load_service_uncertainties() |
|
1666
|
|
|
|
|
1667
|
|
|
|
|
1668
|
|
|
class Analysis_Specifications(WorksheetImporter):
    """Import Analysis Specifications (results ranges), grouped per client
    (or the lab) and per specification title.
    """

    def resolve_service(self, row):
        """Return the AnalysisService referenced by row['service'],
        matching first by title and, failing that, by keyword.

        Raises IndexError if no service matches at all (the second
        catalog query returned nothing).
        """
        bsc = getToolByName(self.context, "bika_setup_catalog")
        service = bsc(
            portal_type="AnalysisService",
            title=safe_unicode(row["service"])
        )
        if not service:
            service = bsc(
                portal_type="AnalysisService",
                getKeyword=safe_unicode(row["service"])
            )
        service = service[0].getObject()
        return service

    def Import(self):
        bucket = {}
        pc = getToolByName(self.context, "portal_catalog")
        bsc = getToolByName(self.context, "bika_setup_catalog")
        # collect up all values into the bucket:
        # bucket[client_title_or_"lab"][spec_title] = {sampletype, resultsrange}
        for row in self.get_rows(3):
            title = row.get("Title", False)
            if not title:
                title = row.get("title", False)
                if not title:
                    continue
            parent = row["Client_title"] if row["Client_title"] else "lab"
            st = row["SampleType_title"] if row["SampleType_title"] else ""
            service = self.resolve_service(row)

            if parent not in bucket:
                bucket[parent] = {}
            if title not in bucket[parent]:
                bucket[parent][title] = {"sampletype": st, "resultsrange": []}
            bucket[parent][title]["resultsrange"].append({
                "keyword": service.getKeyword(),
                "min": row["min"] if row["min"] else "0",
                "max": row["max"] if row["max"] else "0",
                "error": row["error"] if row["error"] else "0"
            })
        # write objects.
        for parent in bucket.keys():
            for title in bucket[parent]:
                if parent == "lab":
                    folder = self.context.bika_setup.bika_analysisspecs
                else:
                    proxy = pc(portal_type="Client", getName=safe_unicode(parent))[0]
                    folder = proxy.getObject()
                st = bucket[parent][title]["sampletype"]
                resultsrange = bucket[parent][title]["resultsrange"]
                if st:
                    st_uid = bsc(portal_type="SampleType", title=safe_unicode(st))[0].UID
                obj = _createObjectByType("AnalysisSpec", folder, tmpID())
                obj.edit(title=title)
                obj.setResultsRange(resultsrange)
                if st:
                    obj.setSampleType(st_uid)
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1732
|
|
|
class Analysis_Profiles(WorksheetImporter):
    """Import Analysis Profiles and their profile->services relations from
    the 'Analysis Profile Services' sheet.
    """

    def load_analysis_profile_services(self):
        """Preload 'Analysis Profile Services' into
        self.profile_services: {profile_title: [AnalysisService, ...]}.
        """
        sheetname = 'Analysis Profile Services'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        self.profile_services = {}
        if not worksheet:
            return
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, worksheet=worksheet):
            if not row.get('Profile','') or not row.get('Service',''):
                continue
            if row['Profile'] not in self.profile_services.keys():
                self.profile_services[row['Profile']] = []
            # Here we match against Keyword or Title.
            # XXX We need a utility for this kind of thing.
            service = self.get_object(bsc, 'AnalysisService', row.get('Service'))
            if not service:
                service = bsc(portal_type='AnalysisService',
                              getKeyword=row['Service'])[0].getObject()
            self.profile_services[row['Profile']].append(service)

    def Import(self):
        self.load_analysis_profile_services()
        folder = self.context.bika_setup.bika_analysisprofiles
        for row in self.get_rows(3):
            if row['title']:
                obj = _createObjectByType("AnalysisProfile", folder, tmpID())
                obj.edit(title=row['title'],
                         description=row.get('description', ''),
                         ProfileKey=row['ProfileKey'],
                         CommercialID=row.get('CommercialID', ''),
                         AnalysisProfilePrice="%02f" % Float(row.get('AnalysisProfilePrice', '0.0')),
                         AnalysisProfileVAT="%02f" % Float(row.get('AnalysisProfileVAT', '0.0')),
                         UseAnalysisProfilePrice=row.get('UseAnalysisProfilePrice', False)
                         )
                # Use .get() so a profile with no rows in the services
                # sheet gets an empty list instead of raising KeyError.
                obj.setService(self.profile_services.get(row['title'], []))
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1774
|
|
|
class AR_Templates(WorksheetImporter):
    """Import AR Templates together with their per-template analyses and
    partitions, read from the 'AR Template Analyses' and
    'AR Template Partitions' sheets.
    """

    def load_artemplate_analyses(self):
        """Preload 'AR Template Analyses' into
        self.artemplate_analyses: {template_title: [{service_uid, partition}]}.
        """
        sheetname = 'AR Template Analyses'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        self.artemplate_analyses = {}
        if not worksheet:
            return
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, worksheet=worksheet):
            # XXX service_uid is not a uid
            service = self.get_object(bsc, 'AnalysisService',
                                      row.get('service_uid'))
            if row['ARTemplate'] not in self.artemplate_analyses.keys():
                self.artemplate_analyses[row['ARTemplate']] = []
            self.artemplate_analyses[row['ARTemplate']].append(
                {'service_uid': service.UID(),
                 'partition': row['partition']
                 }
            )

    def load_artemplate_partitions(self):
        """Preload 'AR Template Partitions' into
        self.artemplate_partitions: {template_title: [partition_dict, ...]}.
        """
        sheetname = 'AR Template Partitions'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        self.artemplate_partitions = {}
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        if not worksheet:
            return
        for row in self.get_rows(3, worksheet=worksheet):
            if row['ARTemplate'] not in self.artemplate_partitions.keys():
                self.artemplate_partitions[row['ARTemplate']] = []
            container = self.get_object(bsc, 'Container',
                                        row.get('container'))
            preservation = self.get_object(bsc, 'Preservation',
                                           row.get('preservation'))
            self.artemplate_partitions[row['ARTemplate']].append({
                'part_id': row['part_id'],
                'Container': container.Title(),
                'container_uid': container.UID(),
                'Preservation': preservation.Title(),
                'preservation_uid': preservation.UID()})

    def Import(self):
        self.load_artemplate_analyses()
        self.load_artemplate_partitions()
        folder = self.context.bika_setup.bika_artemplates
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        pc = getToolByName(self.context, 'portal_catalog')
        for row in self.get_rows(3):
            if not row['title']:
                continue
            # Default to an empty analyses list so a template with no rows
            # in the analyses sheet does not abort the import with a
            # KeyError (mirrors the partitions fallback below).
            analyses = self.artemplate_analyses.get(row['title'], [])
            client_title = row['Client_title'] or 'lab'
            if row['title'] in self.artemplate_partitions:
                partitions = self.artemplate_partitions[row['title']]
            else:
                partitions = [{'part_id': 'part-1',
                               'container': '',
                               'preservation': ''}]

            # Lab-wide templates live in bika_setup; client templates are
            # created inside the matching Client object.
            if client_title == 'lab':
                folder = self.context.bika_setup.bika_artemplates
            else:
                folder = pc(portal_type='Client',
                            getName=client_title)[0].getObject()

            sampletype = self.get_object(bsc, 'SampleType',
                                         row.get('SampleType_title'))
            samplepoint = self.get_object(bsc, 'SamplePoint',
                                          row.get('SamplePoint_title'))

            obj = _createObjectByType("ARTemplate", folder, tmpID())
            obj.edit(
                title=str(row['title']),
                description=row.get('description', ''),
                Remarks=row.get('Remarks', ''),)
            obj.setSampleType(sampletype)
            obj.setSamplePoint(samplepoint)
            obj.setPartitions(partitions)
            obj.setAnalyses(analyses)
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
1859
|
|
|
class Reference_Definitions(WorksheetImporter):
    """Import Reference Definitions and their expected reference results."""

    def load_reference_definition_results(self):
        """Preload reference results into
        self.results: {definition_title: [result_dict, ...]}.

        Accepts either sheet name: 'Reference Definition Results' or,
        as a fallback, 'Reference Definition Values' (presumably a legacy
        workbook layout — both are handled identically).
        """
        # Always initialize, so Import() can safely call self.results.get()
        # even when neither sheet exists in the workbook (previously this
        # attribute was only set after a sheet was found, which made
        # Import() fail with AttributeError for such workbooks).
        self.results = {}
        sheetname = 'Reference Definition Results'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        if not worksheet:
            sheetname = 'Reference Definition Values'
            worksheet = self.workbook.get_sheet_by_name(sheetname)
            if not worksheet:
                return
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, worksheet=worksheet):
            if row['ReferenceDefinition_title'] not in self.results.keys():
                self.results[row['ReferenceDefinition_title']] = []
            service = self.get_object(bsc, 'AnalysisService',
                                      row.get('service'))
            self.results[
                row['ReferenceDefinition_title']].append({
                    'uid': service.UID(),
                    'result': row['result'] if row['result'] else '0',
                    'min': row['min'] if row['min'] else '0',
                    'max': row['max'] if row['max'] else '0'})

    def Import(self):
        self.load_reference_definition_results()
        folder = self.context.bika_setup.bika_referencedefinitions
        for row in self.get_rows(3):
            if not row['title']:
                continue
            obj = _createObjectByType("ReferenceDefinition", folder, tmpID())
            obj.edit(
                title=row['title'],
                description=row.get('description', ''),
                Blank=self.to_bool(row['Blank']),
                ReferenceResults=self.results.get(row['title'], []),
                Hazardous=self.to_bool(row['Hazardous']))
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            notify(ObjectInitializedEvent(obj))
|
1903
|
|
|
class Worksheet_Templates(WorksheetImporter):
    """Import Worksheet Templates with their layouts and service lists from
    the 'Worksheet Template Layouts' / 'Worksheet Template Services' sheets.
    """

    def load_wst_layouts(self):
        """Preload 'Worksheet Template Layouts' into
        self.wst_layouts: {template_title: [layout_slot_dict, ...]}.
        """
        sheetname = 'Worksheet Template Layouts'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        self.wst_layouts = {}
        if not worksheet:
            return
        for row in self.get_rows(3, worksheet=worksheet):
            if row['WorksheetTemplate_title'] \
               not in self.wst_layouts.keys():
                self.wst_layouts[
                    row['WorksheetTemplate_title']] = []
            self.wst_layouts[
                row['WorksheetTemplate_title']].append({
                    'pos': row['pos'],
                    'type': row['type'],
                    'blank_ref': row['blank_ref'],
                    'control_ref': row['control_ref'],
                    'dup': row['dup']})

    def load_wst_services(self):
        """Preload 'Worksheet Template Services' into
        self.wst_services: {template_title: [service_uid, ...]}.
        """
        sheetname = 'Worksheet Template Services'
        worksheet = self.workbook.get_sheet_by_name(sheetname)
        self.wst_services = {}
        if not worksheet:
            return
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, worksheet=worksheet):
            service = self.get_object(bsc, 'AnalysisService',
                                      row.get('service'))
            if row['WorksheetTemplate_title'] not in self.wst_services.keys():
                self.wst_services[row['WorksheetTemplate_title']] = []
            self.wst_services[
                row['WorksheetTemplate_title']].append(service.UID())

    def Import(self):
        self.load_wst_services()
        self.load_wst_layouts()
        folder = self.context.bika_setup.bika_worksheettemplates
        for row in self.get_rows(3):
            if row['title']:
                obj = _createObjectByType("WorksheetTemplate", folder, tmpID())
                # Use .get() defaults so templates without layout/service
                # rows do not abort the import with a KeyError (same
                # defensive lookup as Reference_Definitions.Import).
                obj.edit(
                    title=row['title'],
                    description=row.get('description', ''),
                    Layout=self.wst_layouts.get(row['title'], []))
                obj.setService(self.wst_services.get(row['title'], []))
                obj.unmarkCreationFlag()
                renameAfterCreation(obj)
                notify(ObjectInitializedEvent(obj))
|
1956
|
|
|
class Setup(WorksheetImporter):
    """Apply the 'Setup' sheet (Field/Value pairs) to the bika_setup
    object, converting each raw spreadsheet value to the target field's
    Archetypes type.
    """

    def get_field_value(self, field, value):
        """Convert a raw spreadsheet value for the given schema field.

        Returns the converted value, or None when value is None or no
        converter applies (the failure is logged, not raised).
        """
        if value is None:
            return None
        converters = {
            "integer": self.to_integer_value,
            "fixedpoint": self.to_fixedpoint_value,
            "boolean": self.to_boolean_value,
            "string": self.to_string_value,
            "reference": self.to_reference_value,
            "duration": self.to_duration_value
        }
        try:
            # Unknown field.type yields None -> TypeError, caught below.
            return converters.get(field.type, None)(field, value)
        except Exception:
            logger.error("No valid type for Setup.{} ({}): {}"
                         .format(field.getName(), field.type, value))

    def to_integer_value(self, field, value):
        return str(int(value))

    def to_fixedpoint_value(self, field, value):
        return str(float(value))

    def to_boolean_value(self, field, value):
        return self.to_bool(value)

    def to_string_value(self, field, value):
        # Fields with a vocabulary must map the value onto a vocab key.
        if field.vocabulary:
            return self.to_string_vocab_value(field, value)
        return value and str(value) or ""

    def to_reference_value(self, field, value):
        """Resolve a title to a UID; raises ValueError when not found."""
        if not value:
            return None

        brains = api.search({"title": to_unicode(value)})
        if brains:
            return api.get_uid(brains[0])

        msg = "No object found for Setup.{0} ({1}): {2}"
        msg = msg.format(field.getName(), field.type, value)
        logger.error(msg)
        raise ValueError(msg)

    def to_string_vocab_value(self, field, value):
        """Return the vocabulary key whose key or display value matches
        `value` case-insensitively; raises ValueError otherwise.
        """
        vocabulary = field.vocabulary
        if type(vocabulary) is str:
            vocabulary = getFromString(api.get_setup(), vocabulary)
        else:
            vocabulary = vocabulary.items()

        if not vocabulary:
            raise ValueError("Empty vocabulary for {}".format(field.getName()))

        if type(vocabulary) in (tuple, list):
            vocabulary = {item[0]: item[1] for item in vocabulary}

        for key, val in vocabulary.items():
            key_low = str(to_utf8(key)).lower()
            val_low = str(to_utf8(val)).lower()
            value_low = str(value).lower()
            if key_low == value_low or val_low == value_low:
                return key
        raise ValueError("Vocabulary entry not found")

    def to_duration_value(self, field, values):
        """Build a duration dict from the full Field->Value mapping; the
        components are looked up as '<fieldname>_days' / '_hours' /
        '_minutes'.
        """
        # Build a real list: map() returns an iterator on Python 3, which
        # would break the indexed lookups below.
        duration = ["days", "hours", "minutes"]
        duration = ["{}_{}".format(field.getName(), d) for d in duration]
        return dict(
            days=api.to_int(values.get(duration[0], 0), 0),
            hours=api.to_int(values.get(duration[1], 0), 0),
            minutes=api.to_int(values.get(duration[2], 0), 0))

    def Import(self):
        values = {}
        for row in self.get_rows(3):
            values[row['Field']] = row['Value']

        bsetup = self.context.bika_setup
        bschema = bsetup.Schema()
        for field in bschema.fields():
            value = None
            field_name = field.getName()
            if field_name in values:
                value = self.get_field_value(field, values[field_name])
            elif field.type == "duration":
                # Duration fields span three spreadsheet keys, so pass the
                # whole mapping and let the converter pick its components.
                value = self.get_field_value(field, values)

            if value is None:
                continue
            try:
                obj_field = bsetup.getField(field_name)
                obj_field.set(bsetup, str(value))
            except Exception:
                logger.error("No valid type for Setup.{} ({}): {}"
                             .format(field_name, field.type, value))
|
2057
|
|
|
class ID_Prefixes(WorksheetImporter):
    """Merge spreadsheet rows into the ID formatting/prefix list held on
    bika_setup, replacing any existing entry per portal_type.
    """

    def Import(self):
        prefixes = self.context.bika_setup.getIDFormatting()
        for row in self.get_rows(3):
            portal_type = row['portal_type']
            # Drop any existing entry for this portal_type so the row
            # below fully replaces it.
            prefixes = [entry for entry in prefixes
                        if entry['portal_type'] != portal_type]
            # The spreadsheet will contain 'none' for user's visual stuff, but it means 'no separator'
            sep = row.get('separator', '-')
            if sep == 'none':
                sep = ''
            prefixes.append({'portal_type': portal_type,
                             'padding': row['padding'],
                             'prefix': row['prefix'],
                             'separator': sep})
        # NOTE: persisting the merged list is intentionally left disabled
        # in the original code:
        #self.context.bika_setup.setIDFormatting(prefixes)
|
2076
|
|
|
class Attachment_Types(WorksheetImporter):
    """Create one AttachmentType in bika_setup per spreadsheet row."""

    def Import(self):
        container = self.context.bika_setup.bika_attachmenttypes
        for row in self.get_rows(3):
            atype = _createObjectByType("AttachmentType", container, tmpID())
            atype.edit(title=row['title'],
                       description=row.get('description', ''))
            atype.unmarkCreationFlag()
            renameAfterCreation(atype)
            notify(ObjectInitializedEvent(atype))
|
2090
|
|
|
class Reference_Samples(WorksheetImporter):
    """Imports ReferenceSample objects (and their nested results, reference
    analyses and interim fields) from the 'Reference Samples' worksheet plus
    three companion sheets: 'Reference Sample Results', 'Reference Analyses'
    and 'Reference Analysis Interims'.
    """

    def load_reference_sample_results(self, sample):
        """Load the rows of the 'Reference Sample Results' sheet that belong
        to `sample` and store them via setReferenceResults.

        The worksheet is looked up once and cached on self so repeated calls
        (one per sample) do not re-resolve it.
        """
        sheetname = 'Reference Sample Results'
        if not hasattr(self, 'results_worksheet'):
            worksheet = self.workbook.get_sheet_by_name(sheetname)
            if not worksheet:
                # Sheet absent from this workbook: nothing to load.
                return
            self.results_worksheet = worksheet
        results = []
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, worksheet=self.results_worksheet):
            # The results sheet holds rows for every sample; keep only ours.
            if row['ReferenceSample_id'] != sample.getId():
                continue
            service = self.get_object(bsc, 'AnalysisService',
                                      row.get('AnalysisService_title'))
            if not service:
                warning = "Unable to load a reference sample result. Service %s not found."
                # Bug fix: previously the *sheet name* was interpolated into
                # the message; log the missing service's title instead.
                logger.warning(warning, row.get('AnalysisService_title'))
                continue
            results.append({
                'uid': service.UID(),
                'result': row['result'],
                'min': row['min'],
                'max': row['max']})
        sample.setReferenceResults(results)

    def load_reference_analyses(self, sample):
        """Create ReferenceAnalysis objects for `sample` from the
        'Reference Analyses' sheet, then load each analysis' interims.
        """
        sheetname = 'Reference Analyses'
        if not hasattr(self, 'analyses_worksheet'):
            worksheet = self.workbook.get_sheet_by_name(sheetname)
            if not worksheet:
                return
            self.analyses_worksheet = worksheet
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3, worksheet=self.analyses_worksheet):
            if row['ReferenceSample_id'] != sample.getId():
                continue
            service = self.get_object(bsc, 'AnalysisService',
                                      row.get('AnalysisService_title'))
            if not service:
                # Robustness fix: previously a ReferenceAnalysis was created
                # and setService(None) called when the lookup failed. Skip
                # the row instead, consistent with
                # load_reference_sample_results above.
                warning = "Unable to load a reference analysis. Service %s not found."
                logger.warning(warning, row.get('AnalysisService_title'))
                continue
            # Analyses are keyed/named by service keyword
            obj = _createObjectByType("ReferenceAnalysis", sample, row['id'])
            obj.edit(title=row['id'],
                     ReferenceType=row['ReferenceType'],
                     Result=row['Result'],
                     Analyst=row['Analyst'],
                     Instrument=row['Instrument'],
                     Retested=row['Retested']
                     )
            obj.setService(service)
            # obj.setCreators(row['creator'])
            # obj.setCreationDate(row['created'])
            # self.set_wf_history(obj, row['workflow_history'])
            obj.unmarkCreationFlag()

            self.load_reference_analysis_interims(obj)

    def load_reference_analysis_interims(self, analysis):
        """Load the interim fields belonging to `analysis` from the
        'Reference Analysis Interims' sheet and store them via
        setInterimFields.
        """
        sheetname = 'Reference Analysis Interims'
        if not hasattr(self, 'interim_worksheet'):
            worksheet = self.workbook.get_sheet_by_name(sheetname)
            if not worksheet:
                return
            self.interim_worksheet = worksheet
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        interims = []
        for row in self.get_rows(3, worksheet=self.interim_worksheet):
            if row['ReferenceAnalysis_id'] != analysis.getId():
                continue
            interims.append({
                'keyword': row['keyword'],
                'title': row['title'],
                'value': row['value'],
                'unit': row['unit'],
                'hidden': row['hidden']})
        analysis.setInterimFields(interims)

    def Import(self):
        """Create one ReferenceSample per worksheet row inside its Supplier,
        then load the sample's results and reference analyses from the
        companion sheets.
        """
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        for row in self.get_rows(3):
            if not row['id']:
                continue
            supplier = bsc(portal_type='Supplier',
                           getName=row.get('Supplier_title', ''))[0].getObject()
            obj = _createObjectByType("ReferenceSample", supplier, row['id'])
            ref_def = self.get_object(bsc, 'ReferenceDefinition',
                                      row.get('ReferenceDefinition_title'))
            ref_man = self.get_object(bsc, 'Manufacturer',
                                      row.get('Manufacturer_title'))
            obj.edit(title=row['id'],
                     description=row.get('description', ''),
                     Blank=self.to_bool(row['Blank']),
                     Hazardous=self.to_bool(row['Hazardous']),
                     CatalogueNumber=row['CatalogueNumber'],
                     LotNumber=row['LotNumber'],
                     Remarks=row['Remarks'],
                     ExpiryDate=row['ExpiryDate'],
                     DateSampled=row['DateSampled'],
                     DateReceived=row['DateReceived'],
                     DateOpened=row['DateOpened'],
                     DateExpired=row['DateExpired'],
                     DateDisposed=row['DateDisposed']
                     )
            obj.setReferenceDefinition(ref_def)
            obj.setManufacturer(ref_man)
            obj.unmarkCreationFlag()

            self.load_reference_sample_results(obj)
            self.load_reference_analyses(obj)
|
2199
|
|
|
|
|
2200
|
|
|
class Analysis_Requests(WorksheetImporter):
    """Imports AnalysisRequest objects from the spreadsheet, plus their
    analyses (from the 'Analyses' sheet) and interim fields.
    """

    def load_analyses(self, sample):
        """Create Analysis objects from the 'Analyses' sheet and attach them
        to their AnalysisRequest.

        Note: rows are matched by 'AnalysisRequest_id', so all analyses in
        the sheet are processed on each call, not only those of `sample`.
        """
        sheetname = 'Analyses'
        if not hasattr(self, 'analyses_worksheet'):
            worksheet = self.workbook.get_sheet_by_name(sheetname)
            if not worksheet:
                return
            self.analyses_worksheet = worksheet
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        bc = getToolByName(self.context, 'bika_catalog')
        for row in self.get_rows(3, worksheet=self.analyses_worksheet):
            service = bsc(portal_type='AnalysisService',
                          title=row['AnalysisService_title'])[0].getObject()
            # analyses are keyed/named by keyword
            ar = bc(portal_type='AnalysisRequest', id=row['AnalysisRequest_id'])[0].getObject()
            obj = create_analysis(
                ar, service,
                Result=row['Result'],
                ResultCaptureDate=row['ResultCaptureDate'],
                Analyst=row['Analyst'],
                Instrument=row['Instrument'],
                Retested=self.to_bool(row['Retested']),
                MaxTimeAllowed={
                    'days': int(row.get('MaxTimeAllowed_days', 0)),
                    'hours': int(row.get('MaxTimeAllowed_hours', 0)),
                    'minutes': int(row.get('MaxTimeAllowed_minutes', 0)),
                },
            )

            # NOTE(review): 'Analyses' looks like a meta_type/portal_type
            # filter; an Analysis' type is normally 'Analysis' — confirm
            # this actually returns the existing analyses.
            analyses = ar.objectValues('Analyses')
            analyses = list(analyses)
            analyses.append(obj)
            ar.setAnalyses(analyses)
            obj.unmarkCreationFlag()

            self.load_analysis_interims(obj)

    def load_analysis_interims(self, analysis):
        """Load interim fields for `analysis` and store them via
        setInterimFields.

        NOTE(review): this reads the 'Reference Analysis Interims' sheet and
        matches on 'ReferenceAnalysis_id' — likely copy-pasted from the
        reference-sample importer. Confirm whether a dedicated 'Analysis
        Interims' sheet/key was intended.
        """
        sheetname = 'Reference Analysis Interims'
        if not hasattr(self, 'interim_worksheet'):
            worksheet = self.workbook.get_sheet_by_name(sheetname)
            if not worksheet:
                return
            self.interim_worksheet = worksheet
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        interims = []
        for row in self.get_rows(3, worksheet=self.interim_worksheet):
            if row['ReferenceAnalysis_id'] != analysis.getId():
                continue
            interims.append({
                'keyword': row['keyword'],
                'title': row['title'],
                'value': row['value'],
                'unit': row['unit'],
                'hidden': row['hidden']})
        analysis.setInterimFields(interims)

    def Import(self):
        """Create one AnalysisRequest per worksheet row inside its Client,
        wire up contacts/profile/template, then load its analyses.
        """
        bc = getToolByName(self.context, 'bika_catalog')
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        pc = getToolByName(self.context, 'portal_catalog')
        for row in self.get_rows(3):
            if not row['id']:
                continue
            client = pc(portal_type="Client",
                        getName=row['Client_title'])[0].getObject()
            obj = _createObjectByType("AnalysisRequest", client, row['id'])
            contact = pc(portal_type="Contact",
                         getFullname=row['Contact_Fullname'])[0].getObject()
            obj.edit(
                RequestID=row['id'],
                Contact=contact,
                CCEmails=row['CCEmails'],
                ClientOrderNumber=row['ClientOrderNumber'],
                InvoiceExclude=row['InvoiceExclude'],
                DateReceived=row['DateReceived'],
                DatePublished=row['DatePublished'],
                Remarks=row['Remarks']
            )
            if row['CCContact_Fullname']:
                contact = pc(portal_type="Contact",
                             getFullname=row['CCContact_Fullname'])[0].getObject()
                obj.setCCContact(contact)
            if row['AnalysisProfile_title']:
                # Bug fix: .getObject() was previously called on the cell
                # string inside the query instead of on the first catalog
                # brain of the result.
                profile = pc(portal_type="AnalysisProfile",
                             title=row['AnalysisProfile_title'])[0].getObject()
                obj.setProfile(profile)
            if row['ARTemplate_title']:
                template = pc(portal_type="ARTemplate",
                              title=row['ARTemplate_title'])[0].getObject()
                # NOTE(review): storing an ARTemplate via setProfile looks
                # like a copy-paste slip (setTemplate?) — confirm against the
                # AnalysisRequest schema before changing.
                obj.setProfile(template)

            obj.unmarkCreationFlag()

            self.load_analyses(obj)
|
2296
|
|
|
|
|
2297
|
|
|
|
|
2298
|
|
|
class Invoice_Batches(WorksheetImporter):
    """Imports InvoiceBatch objects from the spreadsheet into the site's
    invoices folder.
    """

    def Import(self):
        """Create one InvoiceBatch per worksheet row.

        Raises an Exception (with a translated message) when a row is
        missing its title, start date or end date.
        """
        container = self.context.invoices
        for record in self.get_rows(3):
            batch = _createObjectByType("InvoiceBatch", container, tmpID())
            # Validate the mandatory columns before editing the object.
            if not record['title']:
                raise Exception(t(_("InvoiceBatch has no Title")))
            if not record['start']:
                raise Exception(t(_("InvoiceBatch has no Start Date")))
            if not record['end']:
                raise Exception(t(_("InvoiceBatch has no End Date")))
            batch.edit(
                title=record['title'],
                BatchStartDate=record['start'],
                BatchEndDate=record['end'],
            )
            renameAfterCreation(batch)
            notify(ObjectInitializedEvent(batch))
|
2320
|
|
|
|