# -*- coding: utf-8 -*-
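"""GenericSetup XML import/export handlers for SENAITE site contents

Exports the portal structure recursively as one XML file per object
(including groups, users, field values, audit log and workflow state) and
re-creates it on import.
"""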

import json
from xml.dom.minidom import parseString

from bika.lims import api
from bika.lims import logger
from bika.lims.interfaces import IAuditable
from bika.lims.interfaces import ISenaiteSiteRoot
from DateTime import DateTime
from OFS.interfaces import IOrderedContainer
from Products.Archetypes.interfaces import IBaseObject
from Products.CMFPlone.utils import _createObjectByType
from Products.CMFPlone.utils import safe_unicode
from Products.GenericSetup.interfaces import IBody
from Products.GenericSetup.interfaces import INode
from Products.GenericSetup.interfaces import ISetupEnviron
from Products.GenericSetup.utils import ObjectManagerHelpers
from Products.GenericSetup.utils import XMLAdapterBase
from zope.component import adapts
from zope.component import queryMultiAdapter
from zope.interface import alsoProvides

from .config import SITE_ID

# Global UID mapping for reference fields
UID_MAP = {}

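# Portal types that are never exported (see can_export)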
SKIP_TYPES = [
    "ARReport",
    "AnalysisRequest",
    "Attachment",
    "Batch",
    "Worksheet",
]


class SenaiteSiteXMLAdapter(XMLAdapterBase, ObjectManagerHelpers):
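    """XML Importer/Exporter for the SENAITE site root

    Handles the portal's groups, users and contained objects.
    """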
    adapts(ISenaiteSiteRoot, ISetupEnviron)

    def __init__(self, context, environ):
        super(SenaiteSiteXMLAdapter, self).__init__(context, environ)

    def _exportNode(self):
        """Export the object as a DOM node.
        """
        node = self._getObjectNode("object")

        # remember the UID of the item for reference fields
        node.setAttribute("uid", "0")

        # Extract all contained objects
        node.appendChild(self._extractObjects())

        # Extract Groups
        node.appendChild(self._extractGroups(self.context))

        # Extract Users
        node.appendChild(self._extractUsers(self.context))

        return node

    def _importNode(self, node):
        """Import the object from the DOM node.
        """
        obj_id = str(node.getAttribute("name"))

        if "acl_users" not in self.context:
            return

        # Add groups and users
        self._initGroups(self.context, node)
        self._initUsers(self.context, node)

        self._logger.info("Imported %r" % obj_id)

    def _initGroups(self, context, node):
        """Create the groups from the <groups> node and set their properties
        """
        group_tool = api.get_tool("portal_groups")
        for child in node.childNodes:
            if child.nodeName != "groups":
                continue
            for cn in child.childNodes:
                if cn.nodeName != "group":
                    continue
                group_id = cn.firstChild.nodeValue
                group = api.user.get_group(group_id)
                if not group:
                    self._logger.info("Adding group {}".format(group_id))
                    roles = cn.getAttribute("roles").split(",")
                    group_tool.addGroup(group_id, roles=roles)
                    group = group_tool.getGroupById(group_id)

                # set the group properties
                group.setProperties(properties={
                    "title": cn.getAttribute("name"),
                    "email": cn.getAttribute("email"),
                })

    def _initUsers(self, context, node):
        """Create the users from the <users> node and set their properties
        and group memberships
        """
        reg_tool = api.get_tool("portal_registration")
        for child in node.childNodes:
            if child.nodeName != "users":
                continue
            for cn in child.childNodes:
                if cn.nodeName != "user":
                    continue
                user_id = cn.firstChild.nodeValue
                user = api.user.get_user(user_id)
                if not user:
                    self._logger.info("Adding user {}".format(user_id))
                    # add a new user with the same password as the user id
                    user = reg_tool.addMember(user_id, user_id)

                # set the user properties
                user.setProperties(properties={
                    "fullname": cn.getAttribute("name"),
                    "email": cn.getAttribute("email"),
                })

                # add the user to the groups
                groups = cn.getAttribute("groups")
                if groups:
                    group_ids = groups.split(",")
                    api.user.add_group(group_ids, user_id)

    def _get_users(self):
        acl_users = api.get_tool("acl_users")
        return acl_users.getUsers()

    def _get_groups(self):
        acl_users = api.get_tool("acl_users")
        return acl_users.getGroups()

    def _get_roles_for_principal(self, principal):
        """Returns a list of roles for the user/group
        """
        ignored_roles = ["Authenticated"]
        roles = filter(lambda r: r not in ignored_roles,
                       principal.getRoles())
        return roles

    def _get_groups_for_principal(self, principal):
        """Returns a list of groups for the user/group
        """
        ignored_groups = ["AuthenticatedUsers"]
        groups = filter(lambda r: r not in ignored_groups,
                        principal.getGroupIds())
        return groups

    def _extractGroups(self, context):
        node = self._doc.createElement("groups")
        for group in self._get_groups():
            name = group.getGroupName()
            roles = self._get_roles_for_principal(group)
            child = self._doc.createElement("group")
            child.setAttribute("name", safe_unicode(name))
            child.setAttribute("roles", ",".join(roles))
            text = self._doc.createTextNode(group.getGroupId())
            child.appendChild(text)
            node.appendChild(child)
        return node

    def _extractUsers(self, context):
        node = self._doc.createElement("users")
        for user in self._get_users():
            name = user.getProperty("fullname")
            groups = self._get_groups_for_principal(user)
            child = self._doc.createElement("user")
            child.setAttribute("name", safe_unicode(name))
            child.setAttribute("email", user.getProperty("email"))
            child.setAttribute("groups", ",".join(groups))
            text = self._doc.createTextNode(user.getId())
            child.appendChild(text)
            node.appendChild(child)
        return node

    def _extractObjects(self):
        fragment = self._doc.createDocumentFragment()
        objects = self.context.objectValues()
        if not IOrderedContainer.providedBy(self.context):
            objects = list(objects)
            objects.sort(lambda x, y: cmp(x.getId(), y.getId()))
        for obj in objects:
            # Check if the object can be exported
            if not can_export(obj):
                logger.info("Skipping {}".format(repr(obj)))
                continue
            exporter = queryMultiAdapter((obj, self.environ), INode)
            if exporter:
                node = exporter.node
                if node is not None:
                    fragment.appendChild(node)
        return fragment


class ContentXMLAdapter(SenaiteSiteXMLAdapter):
    """Content XML Importer/Exporter
    """
    adapts(IBaseObject, ISetupEnviron)

    def __init__(self, context, environ):
        super(ContentXMLAdapter, self).__init__(context, environ)

    def _exportNode(self):
        """Export the object as a DOM node.
        """
        node = self._getObjectNode("object")

        # remember the UID of the item for reference fields
        node.setAttribute("uid", self.context.UID())

        # remember the WF Status
        # TODO: Export the complete Review History
        state = api.get_workflow_status_of(self.context)
        node.setAttribute("state", state)

        # Extract AuditLog
        node.appendChild(self._extractAuditLog(self.context))

        # Extract all fields of the current context
        node.appendChild(self._extractFields(self.context))

        # Extract all contained objects
        node.appendChild(self._extractObjects())

        return node

    def _importNode(self, node):
        """Import the object from the DOM node.
        """

        # initialize audit log, workflow state and field values
        self._initAuditLog(self.context, node)
        self._initWorkflow(self.context, node)
        self._initFields(self.context, node)

        # reindex the object
        self.context.reindexObject()

        # set a new snapshot
        # api.snapshot.take_snapshot(self.context)

        obj_id = str(node.getAttribute("name"))
        self._logger.info("Imported %r" % obj_id)

    def _initAuditLog(self, context, node):
        for child in node.childNodes:
            if child.nodeName == "auditlog":
                snapshots = json.loads(child.firstChild.nodeValue)
                storage = api.snapshot.get_storage(context)
                storage[:] = map(json.dumps, snapshots)[:]
                # make sure the object provides `IAuditable`
                alsoProvides(context, IAuditable)
                return

    def _initWorkflow(self, context, node):
        state = node.getAttribute("state")

        if not state:
            return

        if state == api.get_workflow_status_of(context):
            return

        wf_state = {
            "action": None,
            "actor": None,
            "comments": "Generic Setup Import",
            "review_state": state,
            "time": DateTime(),
        }

        wf = api.get_tool("portal_workflow")
        wf_id = wf.getChainFor(context)[0]
        wf.setStatusOf(wf_id, context, wf_state)

    def _initFields(self, context, node):
        """Set the field values of the object from the <field> nodes
        """
        fields = api.get_fields(context)

        for child in node.childNodes:
            # we only handle field nodes
            if child.nodeName != "field":
                continue

            name = child.getAttribute("name")
            field = fields.get(name)
            if field is None:
                self._logger.warning("Unrecognized field '{}'".format(name))
                continue

            importer = queryMultiAdapter((context, field, self.environ), INode)
            if importer:
                importer.node = child

    def _extractAuditLog(self, context):
        snapshots = api.snapshot.get_snapshots(self.context)
        node = self._doc.createElement("auditlog")
        child = self._doc.createTextNode(json.dumps(snapshots))
        node.appendChild(child)
        return node

    def _extractFields(self, context):
        fragment = self._doc.createDocumentFragment()

        fields = api.get_fields(context)
        for name, field in fields.items():
            # query the field adapter
            exporter = queryMultiAdapter((context, field, self.environ), INode)
            if not exporter:
                continue
            node = exporter.node
            if node is not None:
                fragment.appendChild(node)
        return fragment


def create_content_slugs(parent, parent_path, context):
    """Helper function to create initial content slugs

    Reads the XML file of `parent` from the profile, maps the exported UID
    to the current one in `UID_MAP`, restores the UID for AT contents and
    recursively creates the contained objects, so that reference fields can
    be resolved during import.
    """
    logger.info("create_content_slugs: parent={} parent_path={}".format(
        repr(parent), parent_path))
    path = "%s%s" % (parent_path, get_id(parent))
    filename = "%s.xml" % (path)
    items = dict(parent.objectItems())

    xml = context.readDataFile(filename)

    if xml is None:
        logger.warn("File does not exist: '{}'".format(filename))
        return

    node = parseString(xml)

    if node.nodeName == "#document":
        node = node.firstChild

    name = node.getAttribute("name")
    uid = node.getAttribute("uid")
    logger.info("Processing ID '{}' (UID {}) in path '{}'"
                .format(name, uid, path))

    # remember the UID mapping
    UID_MAP[uid] = api.get_uid(parent)

    # set the UID
    if uid and api.is_at_content(parent):
        parent._setUID(uid)
        # avoid renaming after edit
        parent.unmarkCreationFlag()
    elif uid and api.is_dexterity_content(parent):
        logger.warn("Setting the UID of Dexterity contents is not implemented")

    def is_object_node(n):
        return getattr(n, "nodeName", "") == "object"

    def get_child_nodes(n):
        return getattr(n, "childNodes", [])

    for child in get_child_nodes(node):
        if not is_object_node(child):
            continue

        child_id = child.getAttribute("name")
        portal_type = child.getAttribute("meta_type")
        obj = items.get(child_id)

        if not obj:
            # get the fti
            types_tool = api.get_tool("portal_types")
            fti = types_tool.getTypeInfo(portal_type)
            if fti and fti.product:
                obj = _createObjectByType(portal_type, parent, child_id)
            else:
                continue

        create_content_slugs(obj, path + "/", context)


def can_export(obj):
    """Decides if the object can be exported or not
    """
    if not api.is_object(obj):
        return False
    if api.get_portal_type(obj) in SKIP_TYPES:
        return False
    return True


def get_id(obj):
    """Returns a filesystem-friendly ID for the given object
    """
    if api.is_portal(obj):
        return SITE_ID
    return obj.getId().replace(" ", "_")


def exportObjects(obj, parent_path, context):
    """ Export subobjects recursively.
    """

    if not can_export(obj):
        logger.info("Skipping {}".format(repr(obj)))
        return

    exporter = queryMultiAdapter((obj, context), IBody)
    path = "%s%s" % (parent_path, get_id(obj))
    if exporter:
        if exporter.name:
            path = "%s%s" % (parent_path, exporter.name)
        filename = "%s%s" % (path, exporter.suffix)
        body = exporter.body
        if body is not None:
            context.writeDataFile(filename, body, exporter.mime_type)

    if getattr(obj, "objectValues", False):
        for sub in obj.objectValues():
            exportObjects(sub, path + "/", context)


def importObjects(obj, parent_path, context):
    """ Import subobjects recursively.
    """
    importer = queryMultiAdapter((obj, context), IBody)
    path = "%s%s" % (parent_path, get_id(obj))
    __traceback_info__ = path
    if importer:
        if importer.name:
            path = "%s%s" % (parent_path, importer.name)
        filename = "%s%s" % (path, importer.suffix)
        body = context.readDataFile(filename)
        if body is not None:
            importer.filename = filename  # for error reporting
            importer.body = body

    if getattr(obj, "objectValues", False):
        for sub in obj.objectValues():
            importObjects(sub, path + "/", context)


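# GenericSetup export step handler: dumps the site recursively into the
# profile, one XML file per object, e.g. "senaite.xml",
# "senaite/clients.xml", ... (example filenames assume SITE_ID is "senaite")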
def export_xml(context):
    portal = context.getSite()
    exportObjects(portal, "", context)


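# GenericSetup import step handler: creates the content slugs for UID
# references first and imports the XML files from the profile afterwards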
def import_xml(context):
    portal = context.getSite()

    qi = api.get_tool("portal_quickinstaller")
    installed = qi.isProductInstalled("bika.lims")

    if not installed:
        logger.debug("Nothing to import.")
        return

    # create content slugs for UID references
    create_content_slugs(portal, "", context)

    # import objects
    importObjects(portal, "", context)