Conditions | 33 |
Total Lines | 189 |
Code Lines | 143 |
Lines | 0 |
Ratio | 0 % |
Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include Extract Method / Extract Function. If many parameters or temporary variables are present, Replace Temp with Query and Introduce Parameter Object are also worth considering.
Complex functions like migration.make_migrations() often do a lot of different things. To break such a function down, we need to identify a cohesive component within it. A common approach to find such a component is to look for variables or nested helpers that share the same prefixes, or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
1 | #!/usr/bin/env python |
||
def make_migrations(schema=None):
    """Create migration data for a specified schema.

    Iterates over all ``isomer.schemata`` entry points (restricted to
    *schema* when one is given), replays any previously stored migration
    files to reconstruct the last known model, and writes a new migration
    file whenever the freshly loaded schema differs from that model.

    :param schema: optional schema name; ``None`` processes every schema
    """

    entrypoints = {}
    # NOTE(review): ``old`` is never referenced again inside this function -
    # presumably a leftover; confirm before removing.
    old = {}

    def _apply_migrations(migrations, new_model):
        """Apply migration data to compile an up to date model.

        NOTE(review): ``new_model`` is not used here - the model is rebuilt
        from scratch starting with an empty dict; confirm this is intended.
        """

        def get_path(raw_path):
            """Get local path of schema definition.

            Converts a DeepDiff path string such as ``root['a'][0]['b']``
            into a list like ``['a', 0, 'b']`` usable with dpath.
            """

            log("RAW PATH:", raw_path, type(raw_path))
            path = []
            for item in raw_path.split("["):
                log(item)
                item = item.rstrip("]")
                item = item.replace('"', "")
                item = item.replace("'", "")
                try:
                    # numeric components are list indices
                    item = int(item)
                except ValueError:
                    pass
                path.append(item)
            # drop the leading "root" element of the DeepDiff path
            # NOTE(review): list.remove deletes the first occurrence anywhere
            # in the list, so a genuine "root" key deeper in the path would be
            # dropped instead - confirm paths never contain a second "root".
            path.remove("root")
            log("PATH:", path)
            return path

        def apply_entry(changetype, change, result):
            """Upgrade ``result`` with a single migration entry of the
            given DeepDiff change type."""

            def apply_removes(removes, result):
                """Delete removed fields"""

                for remove in removes:
                    path = get_path(remove)
                    amount = dpath.util.delete(result, path)
                    if amount != 1:
                        log("Not exactly one removed!", path, remove, lvl=warn)
                return result

            def apply_additions(additions, result):
                """Add newly added fields"""

                for addition in additions:
                    path = get_path(addition)
                    entry = additions[addition]
                    log("Adding:", entry, "at", path)
                    dpath.util.new(result, path, entry)
                return result

            if changetype == "type_changes":
                # a type change on root replaces the whole object
                log("Creating new object")
                result = change["root"]["new_value"]
                return result

            if changetype == "dictionary_item_added":
                log("Adding items")
                result = apply_additions(change, result)
            elif changetype == "dictionary_item_removed":
                log("Removing items")
                result = apply_removes(change, result)
            elif changetype == "values_changed":
                log("Changing items' types")
                for item in change:
                    path = get_path(item)
                    log(
                        "Changing",
                        path,
                        "from",
                        change[item]["old_value"],
                        " to",
                        change[item]["new_value"],
                    )
                    # sanity check: current value should still match the
                    # recorded old value before it is overwritten
                    if dpath.util.get(result, path) != change[item]["old_value"]:
                        log("Value change did not work!", lvl=warn)
                    amount = dpath.util.set(result, path, change[item]["new_value"])
                    if amount != 1:
                        log("Not exactly one changed!", path, item, lvl=warn)

            return result

        def get_renames(migrations):
            """Check migrations for renamed fields.

            A rename is guessed when an added and a removed path share the
            same parent; returns a list of ``(old_path, new_path)`` tuples.

            NOTE(review): ``added``/``removed`` are overwritten on every loop
            iteration, so only the *last* migration in the list is actually
            inspected - confirm whether all migrations should be considered.
            """

            log("Checking for rename operations:")
            # pprint(migrations)
            added = removed = None

            for entry in migrations:
                added = entry.get("dictionary_item_added", None)
                removed = entry.get("dictionary_item_removed", None)

            renames = []

            if added and removed:
                for addition in added:
                    path = get_path(addition)
                    for removal in removed:
                        removed_path = get_path(removal)
                        # same parent path -> candidate rename
                        if path[:-1] == removed_path[:-1]:
                            log("Possible rename detected:", removal, "->", addition)
                            renames.append((removed_path, path))
            return renames

        result = {}
        for no, migration in enumerate(migrations):
            log("Migrating", no)
            log("Migration:", migration, lvl=debug)
            # NOTE(review): the return value is never used, and the full
            # migrations list (not the current one) is passed on every
            # iteration - looks like unfinished rename handling.
            renamed = get_renames(migrations)

            for entry in migration:
                result = apply_entry(entry, migration[entry], result)

        pprint(result)
        return result

    def write_migration(schema, counter, path, previous, current):
        """Write out complete migration data.

        Stores the DeepDiff between ``previous`` and ``current`` as
        ``<schema>_<counter>.json`` inside ``path``; does nothing when the
        two models are identical.
        """

        filename = "%s_%04i.json" % (schema, counter)
        migration = DeepDiff(previous, current, verbose_level=2).to_json_pickle()
        # an empty diff serializes to the literal string "{}"
        if migration == "{}":
            log("Nothing changed - no new migration data.", lvl=warn)
            return

        log("Writing migration: ", os.path.join(path, filename))
        log(migration, pretty=True)

        with open(os.path.join(path, filename), "w") as f:
            f.write(migration)

    for schema_entrypoint in iter_entry_points(group="isomer.schemata", name=None):
        try:
            log("Schemata found: ", schema_entrypoint.name, lvl=debug)
            # skip everything except the requested schema, if one was given
            if schema is not None and schema_entrypoint.name != schema:
                continue

            entrypoints[schema_entrypoint.name] = schema_entrypoint
            pprint(schema_entrypoint.dist.location)
            schema_top = schema_entrypoint.dist.location
            # migrations live next to the schemata package, e.g.
            # pkg.schemata.foo -> pkg/migrations/foo
            schema_migrations = schema_entrypoint.module_name.replace(
                "schemata", "migrations"
            ).replace(".", "/")
            path = os.path.join(schema_top, schema_migrations)
            new_model = schema_entrypoint.load()["schema"]

            migrations = []

            try:
                # replay all stored migrations in filename order
                for file in sorted(os.listdir(path)):
                    if not file.endswith(".json"):
                        continue
                    fullpath = os.path.join(path, file)
                    log("Importing migration", fullpath)
                    with open(fullpath, "r") as f:
                        migration = DeepDiff.from_json_pickle(f.read())
                    migrations.append(migration)
                    log("Successfully imported")

                # ImportError doubles as the "no previous migrations" signal
                # (also raised by os.listdir failures caught below)
                if len(migrations) == 0:
                    raise ImportError
                pprint(migrations)
                model = _apply_migrations(migrations, new_model)
                # NOTE(review): ``schema`` may be None here (when all schemata
                # are processed), which would name the file "None_0001.json" -
                # should this be schema_entrypoint.name?
                write_migration(schema, len(migrations) + 1, path, model, new_model)
            except ImportError as e:
                log("No previous migrations for", schema, e, type(e), exc=True)

                if len(migrations) == 0:
                    # first migration: diff from ``None`` to the new model
                    write_migration(schema, 1, path, None, new_model)

        except (ImportError, DistributionNotFound) as e:
            log(
                "Problematic schema: ",
                e,
                type(e),
                schema_entrypoint.name,
                exc=True,
                lvl=warn,
            )

    log(
        "Found schemata: ", sorted(entrypoints.keys()), lvl=debug
    )

    log("Entrypoints:", entrypoints, pretty=True, lvl=debug)
||
240 | |||
def make_single_migration(old, new):
    """Produce migration data for a single old->new model pair.

    Placeholder: not implemented yet; returns ``None`` for any input.
    """
    return None
||
243 | |||
250 |