| Metric           | Value  |
|------------------|--------|
| Total Complexity | 78     |
| Total Lines      | 315    |
| Duplicated Lines | 6.35 % |
| Changes          | 19     |
| Bugs             | 0      |
| Features         | 0      |
Duplicate code is one of the most pungent code smells. A commonly used rule of thumb is to restructure code once it is duplicated in three or more places. The usual remedy is to extract the shared logic into a single method or class that every duplicated call site reuses; in this file, the `get_sp`, `get_idp` and `get_aa` classmethods are identical and are an obvious candidate, as sketched below.
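A minimal sketch of that extraction, assuming the surrounding model definition (the `Base` class and the `types` / `entity_federations` relations) from the listing further down; the helper name `_count_entities_by_type` is invented for illustration and is not part of the original code:

```python
# Sketch only: `_count_entities_by_type` is a hypothetical helper; the filter
# logic is copied from the existing get_sp/get_idp/get_aa bodies.
from datetime import datetime, timedelta

import pytz


class Federation(Base):

    @classmethod
    def _count_entities_by_type(cls, entities, xml_name, ref_date=None):
        # Restrict by registration date only when a ref_date older than one day
        # is given, mirroring the condition used by the three original methods.
        if ref_date and ref_date < pytz.utc.localize(datetime.now() - timedelta(days=1)):
            selected = entities.filter(
                types__xmlname=xml_name,
                entity_federations__registration_instant__lt=ref_date)
        else:
            selected = entities.filter(types__xmlname=xml_name)
        return len(selected)

    # The public names are kept because compute_new_stats resolves them
    # dynamically via getattr(self, 'get_%s' % feature, None).
    @classmethod
    def get_sp(cls, entities, xml_name, ref_date=None):
        return cls._count_entities_by_type(entities, xml_name, ref_date)

    @classmethod
    def get_idp(cls, entities, xml_name, ref_date=None):
        return cls._count_entities_by_type(entities, xml_name, ref_date)

    @classmethod
    def get_aa(cls, entities, xml_name, ref_date=None):
        return cls._count_entities_by_type(entities, xml_name, ref_date)
```

Each public method shrinks to a one-line delegation, so a future change to the date filter is made in exactly one place.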
Complex classes like Federation often do a lot of different things. To break such a class down, we need to identify a cohesive component within it. A common approach to finding such a component is to look for fields and methods that share the same prefixes or suffixes.
Once you have determined the fields and methods that belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate and is often faster. A sketch of the first option follows.
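In Federation, the statistics helpers follow exactly this pattern: `get_sp`, `get_idp`, `get_aa`, the `get_sp_*` / `get_idp_*` protocol counters and `get_stat_protocol` share a prefix and operate only on an entities queryset and a reference date, not on the metadata-processing state. A minimal Extract Class sketch, assuming an invented collaborator name `FederationStats` that does not appear in the original code and the surrounding module's `Base` model:

```python
# Sketch only: FederationStats is a hypothetical collaborator; the method
# bodies would be moved unchanged from the Federation model shown below.
class FederationStats(object):
    """Protocol and entity counting logic extracted from Federation."""

    def __init__(self, federation):
        self.federation = federation

    def get_sp_saml2(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_idp_saml2(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    # ...the remaining get_* counters move here in the same way...

    def get_stat_protocol(self, entities, xml_name, service_type, ref_date):
        # Body moved verbatim from Federation.get_stat_protocol (omitted in this sketch).
        pass


class Federation(Base):
    # ...fields and metadata-processing methods stay here...

    @property
    def stats(self):
        # Thin accessor so callers can reach the extracted statistics component.
        return FederationStats(self)
```

Note that compute_new_stats currently resolves features with `getattr(self, 'get_%s' % feature, None)`, so after such an extraction that lookup would have to target the stats collaborator instead.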
The class under review:

```python
class Federation(Base):
    """
    Model describing an identity federation.
    """

    name = models.CharField(blank=False, null=False, max_length=200,
                            unique=True, verbose_name=_(u'Name'))

    type = models.CharField(blank=True, null=True, max_length=100,
                            unique=False, verbose_name=_(u'Type'), choices=FEDERATION_TYPES)

    url = models.URLField(verbose_name='Federation url',
                          blank=True, null=True)

    fee_schedule_url = models.URLField(verbose_name='Fee schedule url',
                                       max_length=150, blank=True, null=True)

    logo = models.ImageField(upload_to='federation_logo', blank=True,
                             null=True, verbose_name=_(u'Federation logo'))

    is_interfederation = models.BooleanField(default=False, db_index=True,
                                             verbose_name=_(u'Is interfederation'))

    slug = models.SlugField(max_length=200, unique=True)

    country = models.CharField(blank=True, null=True, max_length=100,
                               unique=False, verbose_name=_(u'Country'))

    metadata_update = models.DateTimeField(blank=True, null=True,
                                           unique=False, verbose_name=_(u'Metadata update date and time'))

    certstats = models.CharField(blank=True, null=True, max_length=200,
                                 unique=False, verbose_name=_(u'Certificate Stats'))

    @property
    def certificates(self):
        return json.loads(self.certstats)

    @property
    def _metadata(self):
        # Parse the metadata file once and cache it on the instance.
        if not hasattr(self, '_metadata_cache'):
            self._metadata_cache = self.load_file()
        return self._metadata_cache

    def __unicode__(self):
        return self.name

    def get_entity_metadata(self, entityid):
        return self._metadata.get_entity(entityid)

    def get_entity(self, entityid):
        return self.entity_set.get(entityid=entityid)

    def process_metadata(self):
        metadata = self.load_file()

        # Skip processing when the metadata file has not changed since the last run.
        if self.file_id and metadata.file_id and metadata.file_id == self.file_id:
            return
        else:
            self.file_id = metadata.file_id

        if not metadata:
            return
        if not metadata.is_federation:
            raise XmlDescriptionError("XML does not describe a federation")

        update_obj(metadata.get_federation(), self)
        self.certstats = MetadataParser.get_certstats(metadata.rootelem)

    def _remove_deleted_entities(self, entities_from_xml):
        removed = 0
        for entity in self.entity_set.all():
            # Remove the entity relation if it no longer exists in the metadata
            if entity.entityid not in entities_from_xml:
                Entity_Federations.objects.filter(
                    federation=self, entity=entity).delete()
                removed += 1

        return removed

    def _get_or_create_ecategories(self, entity, cached_entity_categories):
        entity_categories = []
        efed = Entity_Federations.objects.get_or_create(federation=self, entity=entity)[0]
        cur_cached_categories = [
            t.category_id for t in efed.entity_categories.all()]
        for ecategory in entity.xml_categories:
            if ecategory in cur_cached_categories:
                break

            if cached_entity_categories is None:
                entity_category, _ = EntityCategory.objects.get_or_create(
                    category_id=ecategory)
            else:
                if ecategory in cached_entity_categories:
                    entity_category = cached_entity_categories[ecategory]
                else:
                    entity_category = EntityCategory.objects.create(
                        category_id=ecategory)
            entity_categories.append(entity_category)
        return entity_categories

    def _update_entities(self, entities_to_update, entities_to_add):
        for e in entities_to_update:
            e.save()

        for e in entities_to_add:
            membership = Entity_Federations.objects.get_or_create(federation=self, entity=e)[0]
            membership.registration_instant = e.registration_instant.date() if e.registration_instant else None

            if e.xml_categories:
                db_entity_categories = EntityCategory.objects.all()
                cached_entity_categories = {
                    entity_category.category_id: entity_category for entity_category in db_entity_categories}

                # Delete categories no longer present in the XML
                membership.entity_categories.clear()

                # Create all categories, if not already in the database
                entity_categories = self._get_or_create_ecategories(e, cached_entity_categories)

                # Add categories to the entity
                if len(entity_categories) > 0:
                    membership.entity_categories.add(*entity_categories)
            else:
                # No categories in the XML, delete any categories left in the DB
                membership.entity_categories.clear()

            membership.save()

    def _add_new_entities(self, entities, entities_from_xml, request, federation_slug):
        db_entity_types = EntityType.objects.all()
        cached_entity_types = {
            entity_type.xmlname: entity_type for entity_type in db_entity_types}

        entities_to_add = []
        entities_to_update = []

        for m_id in entities_from_xml:
            if request and federation_slug:
                request.session['%s_cur_entities' % federation_slug] += 1
                request.session.save()

            created = False
            if m_id in entities:
                entity = entities[m_id]
            else:
                entity, created = Entity.objects.get_or_create(entityid=m_id)

            # Snapshot the current values to detect changes after reprocessing.
            entityid = entity.entityid
            name = entity.name
            registration_authority = entity.registration_authority
            certstats = entity.certstats
            display_protocols = entity._display_protocols

            entity_from_xml = self._metadata.get_entity(m_id, False)
            entity.process_metadata(False, entity_from_xml, cached_entity_types, self)

            if created or entity.has_changed(entityid, name, registration_authority, certstats, display_protocols):
                entities_to_update.append(entity)

            entities_to_add.append(entity)

        self._update_entities(entities_to_update, entities_to_add)
        return len(entities_to_update)

    @staticmethod
    def _daterange(start_date, end_date):
        # Yield every day from start_date to end_date, inclusive.
        for n in range(int((end_date - start_date).days + 1)):
            yield start_date + timedelta(n)

    def compute_new_stats(self):
        if not self._metadata:
            return ([], [])
        entities_from_xml = self._metadata.get_entities()

        entities = Entity.objects.filter(entityid__in=entities_from_xml)
        entities = entities.prefetch_related('types')
        Entity_Federations.objects.filter(federation=self)

        # Resume from the most recent stored statistic, or from 2010-01-01 if none exists.
        try:
            first_date = EntityStat.objects.filter(
                federation=self).aggregate(Max('time'))['time__max']
            if not first_date:
                raise Exception('Not able to find statistical data in the DB.')
        except Exception:
            first_date = datetime(2010, 1, 1)
            first_date = pytz.utc.localize(first_date)

        for curtimestamp in self._daterange(first_date, timezone.now() - timedelta(1)):
            computed = {}
            not_computed = []
            entity_stats = []
            for feature in stats['features'].keys():
                # Each configured feature maps to a get_<feature> method on this class.
                fun = getattr(self, 'get_%s' % feature, None)

                if callable(fun):
                    stat = EntityStat()
                    stat.feature = feature
                    stat.time = curtimestamp
                    stat.federation = self
                    stat.value = fun(
                        entities, stats['features'][feature], curtimestamp)
                    entity_stats.append(stat)
                    computed[feature] = stat.value
                else:
                    not_computed.append(feature)

            from_time = datetime.combine(curtimestamp, time.min)
            if timezone.is_naive(from_time):
                from_time = pytz.utc.localize(from_time)
            to_time = datetime.combine(curtimestamp, time.max)
            if timezone.is_naive(to_time):
                to_time = pytz.utc.localize(to_time)

            # Replace any statistics already stored for this day.
            EntityStat.objects.filter(
                federation=self, time__gte=from_time, time__lte=to_time).delete()
            EntityStat.objects.bulk_create(entity_stats)

        return (computed, not_computed)

    def process_metadata_entities(self, request=None, federation_slug=None):
        if not self._metadata:
            return
        entities_from_xml = self._metadata.get_entities()
        removed = self._remove_deleted_entities(entities_from_xml)

        entities = {}
        db_entities = Entity.objects.filter(entityid__in=entities_from_xml)
        db_entities = db_entities.prefetch_related('types')
        # TODO: add prefetch_related for federations, entity_categories

        for entity in db_entities.all():
            entities[entity.entityid] = entity

        if request and federation_slug:
            # Initialise progress counters kept in the session.
            request.session['%s_num_entities' %
                            federation_slug] = len(entities_from_xml)
            request.session['%s_cur_entities' % federation_slug] = 0
            request.session['%s_process_done' % federation_slug] = False
            request.session.save()

        updated = self._add_new_entities(
            entities, entities_from_xml, request, federation_slug)

        if request and federation_slug:
            request.session['%s_process_done' % federation_slug] = True
            request.session.save()

        return removed, updated

    def get_absolute_url(self):
        return reverse('federation_view', args=[self.slug])

    @classmethod
    def get_sp(cls, entities, xml_name, ref_date=None):
        if ref_date and ref_date < pytz.utc.localize(datetime.now() - timedelta(days=1)):
            selected = entities.filter(
                types__xmlname=xml_name, entity_federations__registration_instant__lt=ref_date)
        else:
            selected = entities.filter(types__xmlname=xml_name)
        return len(selected)

    @classmethod
    def get_idp(cls, entities, xml_name, ref_date=None):
        if ref_date and ref_date < pytz.utc.localize(datetime.now() - timedelta(days=1)):
            selected = entities.filter(
                types__xmlname=xml_name, entity_federations__registration_instant__lt=ref_date)
        else:
            selected = entities.filter(types__xmlname=xml_name)
        return len(selected)

    @classmethod
    def get_aa(cls, entities, xml_name, ref_date=None):
        if ref_date and ref_date < pytz.utc.localize(datetime.now() - timedelta(days=1)):
            selected = entities.filter(
                types__xmlname=xml_name, entity_federations__registration_instant__lt=ref_date)
        else:
            selected = entities.filter(types__xmlname=xml_name)
        return len(selected)

    def get_sp_saml1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_sp_saml2(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_sp_shib1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_idp_saml1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    def get_idp_saml2(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    def get_idp_shib1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    def get_stat_protocol(self, entities, xml_name, service_type, ref_date):
        # Count entities of the given service type that expose the protocol,
        # optionally limited to those registered before ref_date.
        if ref_date and ref_date < pytz.utc.localize(datetime.now() - timedelta(days=1)):
            selected = entities.filter(types__xmlname=service_type,
                                       _display_protocols__contains=xml_name,
                                       entity_federations__registration_instant__lt=ref_date)
        else:
            selected = entities.filter(types__xmlname=service_type,
                                       _display_protocols__contains=xml_name)

        return len(selected)

    def can_edit(self, user, delete):
        # Superusers can always edit; everyone else needs the model permission
        # and must be listed among the federation's editor users.
        if user.is_superuser:
            return True

        permission = 'delete_federation' if delete else 'change_federation'
        if user.has_perm('metadataparser.%s' % permission) and user in self.editor_users.all():
            return True
        return False
```