#!/usr/bin/env python
# -*- coding: UTF-8 -*-

# Isomer - The distributed application framework
# ==============================================
# Copyright (C) 2011-2020 Heiko 'riot' Weinen <[email protected]> and others.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""

Module: Migration
=================

"""

# from isomer.database import schemastore
from isomer.logger import isolog, warn, debug  # , error, verbose, critical
from deepdiff.diff import DeepDiff
from pkg_resources import iter_entry_points, DistributionNotFound
import dpath.util
import os

# import json

from pprint import pprint

MIGRATION_TEMPLATE = """#!/usr/bin/env python

# Migration template

"""


def log(*args, **kwargs):
    """Log as previous emitter"""
    kwargs.update({"frame_ref": 2})
    if "emitter" not in kwargs:
        kwargs["emitter"] = "MIGRATIONS"
    isolog(*args, **kwargs)


def make_migrations(schema=None):
    """Create migration data for a specified schema"""

    entrypoints = {}
    old = {}

    def _apply_migrations(migrations, new_model):
        """Apply migration data to compile an up to date model"""

        def get_path(raw_path):
            """Get local path of schema definition"""

            log("RAW PATH:", raw_path, type(raw_path))
            path = []
            for item in raw_path.split("["):
                log(item)
                item = item.rstrip("]")
                item = item.replace('"', "")
                item = item.replace("'", "")
                try:
                    item = int(item)
                except ValueError:
                    pass
                path.append(item)
            path.remove("root")
            log("PATH:", path)
            return path

        def apply_entry(changetype, change, result):
            """Upgrade with a single migration"""

            def apply_removes(removes, result):
                """Delete removed fields"""

                for remove in removes:
                    path = get_path(remove)
                    amount = dpath.util.delete(result, path)
                    if amount != 1:
                        log("Not exactly one removed!", path, remove, lvl=warn)
                return result

            def apply_additions(additions, result):
                """Add newly added fields"""

                for addition in additions:
                    path = get_path(addition)
                    entry = additions[addition]
                    log("Adding:", entry, "at", path)
                    dpath.util.new(result, path, entry)
                return result

            if changetype == "type_changes":
                log("Creating new object")
                result = change["root"]["new_value"]
                return result

            if changetype == "dictionary_item_added":
                log("Adding items")
                result = apply_additions(change, result)
            elif changetype == "dictionary_item_removed":
                log("Removing items")
                result = apply_removes(change, result)
            elif changetype == "values_changed":
                log("Changing items' values")
                for item in change:
                    path = get_path(item)
                    log(
                        "Changing",
                        path,
                        "from",
                        change[item]["old_value"],
                        "to",
                        change[item]["new_value"],
                    )
                    if dpath.util.get(result, path) != change[item]["old_value"]:
                        log("Value change did not work!", lvl=warn)
                    amount = dpath.util.set(result, path, change[item]["new_value"])
                    if amount != 1:
                        log("Not exactly one changed!", path, item, lvl=warn)

            return result

        def get_renames(migrations):
            """Check migrations for renamed fields"""

            log("Checking for rename operations:")
            # pprint(migrations)
            added = removed = None

            for entry in migrations:
                added = entry.get("dictionary_item_added", None)
                removed = entry.get("dictionary_item_removed", None)

            renames = []

            if added and removed:
                for addition in added:
                    path = get_path(addition)
                    for removal in removed:
                        removed_path = get_path(removal)
                        if path[:-1] == removed_path[:-1]:
                            log("Possible rename detected:", removal, "->", addition)
                            renames.append((removed_path, path))
            return renames

        result = {}
        for no, migration in enumerate(migrations):
            log("Migrating", no)
            log("Migration:", migration, lvl=debug)
            renamed = get_renames(migrations)

            for entry in migration:
                result = apply_entry(entry, migration[entry], result)

        pprint(result)
        return result

    def write_migration(schema, counter, path, previous, current):
        """Write out complete migration data"""

        filename = "%s_%04i.json" % (schema, counter)
        migration = DeepDiff(previous, current, verbose_level=2).to_json_pickle()
        if migration == "{}":
            log("Nothing changed - no new migration data.", lvl=warn)
            return

        log("Writing migration: ", os.path.join(path, filename))
        log(migration, pretty=True)

        with open(os.path.join(path, filename), "w") as f:
            f.write(migration)

    for schema_entrypoint in iter_entry_points(group="isomer.schemata", name=None):
        try:
            log("Schemata found: ", schema_entrypoint.name, lvl=debug)
            if schema is not None and schema_entrypoint.name != schema:
                continue

            entrypoints[schema_entrypoint.name] = schema_entrypoint
            pprint(schema_entrypoint.dist.location)
            schema_top = schema_entrypoint.dist.location
            schema_migrations = schema_entrypoint.module_name.replace(
                "schemata", "migrations"
            ).replace(".", "/")
            path = os.path.join(schema_top, schema_migrations)
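            # The resulting path points at the package's migrations directory,
            # e.g. "<dist location>/isomer/migrations/geoobject" for an entry
            # point module "isomer.schemata.geoobject" (names illustrative).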
            new_model = schema_entrypoint.load()["schema"]

            migrations = []

            try:
                for file in sorted(os.listdir(path)):
                    if not file.endswith(".json"):
                        continue
                    fullpath = os.path.join(path, file)
                    log("Importing migration", fullpath)
                    with open(fullpath, "r") as f:
                        migration = DeepDiff.from_json_pickle(f.read())
                    migrations.append(migration)
                    log("Successfully imported")

                if len(migrations) == 0:
                    raise ImportError
                pprint(migrations)
                model = _apply_migrations(migrations, new_model)
                write_migration(
                    schema_entrypoint.name, len(migrations) + 1, path, model, new_model
                )
            except ImportError as e:
                log("No previous migrations for", schema_entrypoint.name, e, type(e), exc=True)

                if len(migrations) == 0:
                    write_migration(schema_entrypoint.name, 1, path, None, new_model)

        except (ImportError, DistributionNotFound) as e:
            log(
                "Problematic schema: ",
                e,
                type(e),
                schema_entrypoint.name,
                exc=True,
                lvl=warn,
            )

    log("Found schemata: ", sorted(entrypoints.keys()), lvl=debug)

    log("Entrypoints:", entrypoints, pretty=True, lvl=debug)


def make_single_migration(old, new):
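    # Not implemented yet; a single old -> new migration could presumably be
    # produced with DeepDiff(old, new, verbose_level=2), mirroring
    # write_migration above.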
    pass


def apply_migrations(ctx):
    """Apply migrations to a database"""
    log("Applying migrations is not implemented, yet.", lvl=warn)

    _ = ctx