#!/usr/bin/env python
# -*- coding: UTF-8 -*-

# Isomer - The distributed application framework
# ==============================================
# Copyright (C) 2011-2020 Heiko 'riot' Weinen <[email protected]> and others.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Database backup functionality"""

import json
from ast import literal_eval

import bson
import pymongo

from isomer.logger import isolog, debug, verbose, error, warn

def backup_log(*args, **kwargs):
    """Log as emitter 'BACKUP'"""
    kwargs.update({"emitter": "BACKUP", "frame_ref": 2})
    isolog(*args, **kwargs)

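# backup_log wraps isolog so every message from this module is tagged with the
# "BACKUP" emitter. An illustrative call (the collection name is hypothetical):
#
#     backup_log("Archiving collection:", "vessels", lvl=debug)
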
def dump(db_host, db_port, db_name, filename):
    """Dump a full database to JSON"""

    backup_log("Connecting database", db_host, db_port, db_name, lvl=debug)

    client = pymongo.MongoClient(host=str(db_host), port=int(db_port))
    db = client[str(db_name)]

    backup_log("Dumping data from database", db_name)

    content = []

    for collection_name in db.list_collection_names():
        backup_log("Archiving collection:", collection_name, lvl=debug)
        collection = db[collection_name]
        cursor = collection.find({})

        objects = []

        for document in cursor:
            backup_log(
                "Archiving:",
                # Documents are dicts and not sliceable, so stringify before
                # truncating the log preview to 50 characters:
                str(document)[:50],
                lvl=verbose,
            )
            document["_id"] = str(document["_id"])
            objects.append(document)

        collection = {"collection": collection_name, "data": objects}
        content.append(collection)

    with open(filename, "w") as file:
        json.dump(content, file)

    backup_log("Done")

    return True

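# Minimal usage sketch for dump(), with hypothetical host, port, database and
# output path (assumes a reachable MongoDB instance). The written file holds a
# list of {"collection": <name>, "data": [<documents>]} entries, as built above:
#
#     dump("localhost", 27017, "isomer", "/tmp/isomer-backup.json")
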
def load(db_host, db_port, db_name, filename):
    """Load a full database dump from JSON"""

    backup_log("Connecting database")

    client = pymongo.MongoClient(host=str(db_host), port=int(db_port))
    db = client[str(db_name)]

    backup_log("Loading data")

    with open(filename, "r") as file:
        data = json.load(file)

    backup_log("Storing data to database")

    for import_item in data:
        collection_name = import_item["collection"]

        collection = db[collection_name]
        requests = []

        for document in import_item["data"]:
            document["_id"] = bson.ObjectId(document["_id"])
            requests.append(
                pymongo.ReplaceOne({"uuid": document["uuid"]}, document, upsert=True)
            )

        size = len(requests)

        if size > 0:
            collection.bulk_write(requests)
            backup_log(
                "Imported %i object%s into collection '%s'"
                % (size, "s" if size != 1 else "", collection_name)
            )

    backup_log("Done")

    return True

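# Round-trip sketch with the same hypothetical parameters: restore the file
# written by dump(). Documents are upserted by their "uuid" field, so existing
# objects are replaced rather than duplicated:
#
#     load("localhost", 27017, "isomer", "/tmp/isomer-backup.json")
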
def backup(
    schema, uuid, export_filter, export_format, filename, pretty, export_all, omit
):
    """Export objects of one or all schemata to a (JSON-) file or stdout."""

    from isomer.database import objectmodels

    export_format = export_format.upper()

    if pretty:
        indent = 4
    else:
        indent = 0

    f = None

    if filename:
        try:
            f = open(filename, "w")
        except (IOError, PermissionError):
            backup_log("Could not open output file for writing:", exc=True, lvl=error)
            return

    def output(what, convert=False):
        """Output the backup in a specified format."""

        if convert:
            if export_format == "JSON":
                data = json.dumps(what, indent=indent)
            else:
                data = ""
        else:
            data = what

        if not filename:
            # Do not use logger here! This data must go immediately to stdout.
            print(data)
        else:
            f.write(data)

    if schema is None:
        if export_all is False:
            backup_log("No schema given.", lvl=warn)
            return
        else:
            schemata = objectmodels.keys()
    else:
        schemata = [schema]

    all_items = {}

    for schema_item in schemata:
        model = objectmodels[schema_item]

        if uuid:
            obj = model.find({"uuid": uuid})
        elif export_filter:
            obj = model.find(literal_eval(export_filter))
        else:
            obj = model.find()

        items = []
        for item in obj:
            fields = item.serializablefields()
            for field in omit:
                try:
                    fields.pop(field)
                except KeyError:
                    pass
            items.append(fields)

        all_items[schema_item] = items

        # if pretty is True:
        #     output('\n// Objectmodel: ' + schema_item + '\n\n')
        #     output(schema_item + ' = [\n')

    output(all_items, convert=True)

    if f is not None:
        f.flush()
        f.close()

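# Usage sketch for backup(), assuming an initialized isomer.database (so that
# objectmodels is populated); the schema name and path are hypothetical.
# Export all "user" objects as pretty-printed JSON, omitting no fields:
#
#     backup("user", None, None, "json", "/tmp/users.json", True, False, [])
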
def internal_restore(
    schema, uuid, object_filter, import_format, filename, all_schemata, dry
):
    """Import objects from a (JSON) backup into the database"""

    from isomer.database import objectmodels

    import_format = import_format.upper()

    if import_format == "JSON":
        with open(filename, "r") as f:
            json_data = f.read()
            data = json.loads(json_data)  # , parse_float=True, parse_int=True)
    else:
        backup_log("Importing non-JSON data is WiP!", lvl=error)
        return

    if schema is None:
        if all_schemata is False:
            backup_log("No schema given. Read the help", lvl=warn)
            return
        else:
            schemata = data.keys()
    else:
        schemata = [schema]

    if object_filter is not None:
        backup_log("Object filtering on import is WiP! Ignoring for now.", lvl=warn)

    all_items = {}
    total = 0

    for schema_item in schemata:
        model = objectmodels[schema_item]

        objects = data[schema_item]
        items = []
        if uuid:
            for item in objects:
                if item["uuid"] == uuid:
                    items = [model(item)]
        else:
            for item in objects:
                thing = model(item)
                items.append(thing)

        schema_total = len(items)
        total += schema_total

        if dry:
            backup_log("Would import", schema_total, "items of", schema_item)
        all_items[schema_item] = items

    if dry:
        backup_log("Would import", total, "objects.")
    else:
        backup_log("Importing", total, "objects.")
        for schema_name, item_list in all_items.items():
            backup_log("Importing", len(item_list), "objects of type", schema_name)
            for item in item_list:
                item._fields["_id"] = bson.objectid.ObjectId(item._fields["_id"])
                item.save()

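# Dry-run sketch for internal_restore(), complementing backup() above (the
# path is hypothetical): report what a restore of all schemata would import,
# without saving anything:
#
#     internal_restore(None, None, None, "json", "/tmp/users.json", True, True)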