Conditions | 25 |
Total Lines | 127 |
Code Lines | 75 |
Lines | 0 |
Ratio | 0 % |
Tests | 61 |
CRAP Score | 25 |
Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include:
If many parameters/temporary variables are present: Extract Method, often combined with Introduce Parameter Object or Preserve Whole Object.
Complex methods like abydos.distance._sift4_extended.Sift4Extended.dist_abs() often do a lot of different things. To break such a method down, we need to identify a cohesive component within it. A common approach to find such a component is to look for fields/methods that share the same prefixes, or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
1 | # Copyright 2019-2020 by Christopher C. Little. |
||
def dist_abs(self, src: str, tar: str) -> float:
    """Return the Sift4 Extended distance between two strings.

    Parameters
    ----------
    src : str
        Source string for comparison
    tar : str
        Target string for comparison

    Returns
    -------
    int
        The Sift4 distance according to the extended formula

    Examples
    --------
    >>> cmp = Sift4Extended()
    >>> cmp.dist_abs('cat', 'hat')
    1
    >>> cmp.dist_abs('Niall', 'Neil')
    2
    >>> cmp.dist_abs('aluminum', 'Catalan')
    5
    >>> cmp.dist_abs('ATCG', 'TAGC')
    1

    .. versionadded:: 0.4.0

    """
    # Tokenize both inputs with the configured tokenizer; the distance is
    # computed over token lists rather than raw characters.
    src_list = self._tokenizer.tokenize(src).get_list()
    tar_list = self._tokenizer.tokenize(tar).get_list()

    # Trivial cases: the distance to an empty token list is the length of
    # the other list.
    if not src_list:
        return len(tar_list)

    if not tar_list:
        return len(src_list)

    src_len = len(src_list)
    tar_len = len(tar_list)

    src_cur = 0  # cursor into src_list
    tar_cur = 0  # cursor into tar_list
    lcss = 0.0  # accumulated (banked) common-substring length
    local_cs = 0.0  # length of the common-substring run in progress
    trans = 0.0  # accumulated transposition cost
    # Records of previously matched (src, tar) index pairs, consulted to
    # detect transpositions.
    offset_arr = []  # type: List[Dict[str, Union[int, bool]]]

    while (src_cur < src_len) and (tar_cur < tar_len):
        if self._token_matcher(src_list[src_cur], tar_list[tar_cur]):
            # Tokens match: extend the current common-substring run by
            # this pair's match score.
            local_cs += self._matching_evaluator(
                src_list[src_cur], tar_list[tar_cur]
            )
            is_trans = False
            i = 0
            # Scan earlier match records to decide whether this match is a
            # transposition; stale records are pruned in the same pass
            # (hence the manual index loop with `del`).
            while i < len(offset_arr):
                ofs = offset_arr[i]
                if src_cur <= ofs['src_cur'] or tar_cur <= ofs['tar_cur']:
                    # Current match overlaps this earlier one: it counts as
                    # a transposition if its cursors are at least as far
                    # apart as the earlier pair's.
                    is_trans = abs(tar_cur - src_cur) >= abs(
                        ofs['tar_cur'] - ofs['src_cur']
                    )
                    if is_trans:
                        trans += self._transposition_cost_evaluator(
                            src_cur, tar_cur
                        )
                    elif not ofs['trans']:
                        # Otherwise charge the earlier pair, but only once
                        # (the first time it is implicated).
                        ofs['trans'] = True
                        trans += self._transposition_cost_evaluator(
                            ofs['tar_cur'], ofs['src_cur']
                        )
                    break
                elif src_cur > ofs['tar_cur'] and tar_cur > ofs['src_cur']:
                    # Both cursors have moved past this record; drop it.
                    # NOTE(review): the crossed src/tar comparison here
                    # mirrors the reference Sift4 implementation.
                    del offset_arr[i]
                else:
                    i += 1

            offset_arr.append(
                {'src_cur': src_cur, 'tar_cur': tar_cur, 'trans': is_trans}
            )
        else:
            # Mismatch: bank the current run and try to re-synchronize.
            lcss += self._local_length_evaluator(local_cs)
            local_cs = 0
            if src_cur != tar_cur:
                # Re-align both cursors at the smaller index.
                src_cur = tar_cur = min(src_cur, tar_cur)
            # Look ahead up to _max_offset positions in either list for
            # the next matching token.
            for i in range(self._max_offset):
                if not (
                    (src_cur + i < src_len) or (tar_cur + i < tar_len)
                ):
                    break
                if (src_cur + i < src_len) and (
                    self._token_matcher(
                        src_list[src_cur + i], tar_list[tar_cur]
                    )
                ):
                    # Match found ahead in src; the unconditional `+= 1`
                    # after this loop completes the cursor adjustment.
                    src_cur += i - 1
                    tar_cur -= 1
                    break
                if (tar_cur + i < tar_len) and (
                    self._token_matcher(
                        src_list[src_cur], tar_list[tar_cur + i]
                    )
                ):
                    # Symmetric case: match found ahead in tar.
                    src_cur -= 1
                    tar_cur += i - 1
                    break

        src_cur += 1
        tar_cur += 1

        if self._max_distance:
            # Early exit: once the provisional distance reaches the
            # configured cap, report it without scanning further.
            temporary_distance = self._local_length_evaluator(
                max(src_cur, tar_cur)
            ) - self._transpositions_evaluator(lcss, trans)
            if temporary_distance >= self._max_distance:
                return round(temporary_distance)

        if (src_cur >= src_len) or (tar_cur >= tar_len):
            # One list exhausted: bank the run in progress and re-align
            # the cursors at the smaller index.
            lcss += self._local_length_evaluator(local_cs)
            local_cs = 0
            src_cur = tar_cur = min(src_cur, tar_cur)

    # Bank any run still open when the loop ends, then convert the
    # longest-common-subsequence total and transposition cost into the
    # final (rounded) distance.
    lcss += self._local_length_evaluator(local_cs)
    return round(
        self._local_length_evaluator(max(src_len, tar_len))
        - self._transpositions_evaluator(lcss, trans)
    )
365 |