#################################################################
# MET v2 Metadata Explorer Tool
#
# This Software is Open Source. See License: https://github.com/TERENA/met/blob/master/LICENSE.md
# Copyright (c) 2012, TERENA All rights reserved.
#
# This Software is based on MET v1 developed for TERENA by Yaco Sistemas, http://www.yaco.es/
# MET v2 was developed for TERENA by Tamim Ziai, DAASI International GmbH, http://www.daasi.de
# The current version of MET has been revised for performance improvements by Andrea Biancini,
# Consortium GARR, http://www.garr.it
#########################################################################################

import simplejson as json
import pytz

from os import path
from urlparse import urlparse
from urllib import quote_plus
from datetime import datetime, time, timedelta

from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import User
from django.core import validators
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Count, Max
from django.db.models.signals import pre_save
from django.db.models.query import QuerySet
from django.dispatch import receiver
from django.template.defaultfilters import slugify
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone

from lxml import etree

from pyff.mdrepo import MDRepository
from pyff.pipes import Plumbing

from met.metadataparser.utils import compare_filecontents
from met.metadataparser.xmlparser import MetadataParser, DESCRIPTOR_TYPES_DISPLAY
from met.metadataparser.templatetags import attributemap


TOP_LENGTH = getattr(settings, "TOP_LENGTH", 5)
stats = getattr(settings, "STATS")

FEDERATION_TYPES = (
    (None, ''),
    ('hub-and-spoke', 'Hub and Spoke'),
    ('mesh', 'Full Mesh'),
)


def update_obj(mobj, obj, attrs=None):
    for_attrs = attrs or getattr(mobj, 'all_attrs', [])
    for attrb in attrs or for_attrs:
        if (getattr(mobj, attrb, None) and
                getattr(obj, attrb, None) and
                getattr(mobj, attrb) != getattr(obj, attrb)):
            setattr(obj, attrb, getattr(mobj, attrb))

class JSONField(models.CharField):
    """JSONField is a generic textfield that neatly serializes/unserializes
    JSON objects seamlessly.

    The JSON spec claims you must use a collection type at the top level of
    the data structure. However, the simplejson decoder and Firefox both encode
    and decode non-collection types that do not exist inside a collection.
    The to_python method relies on the value being an instance of basestring
    to ensure that it is encoded. If a string is the sole value at the
    point the field is instanced, to_python attempts to decode the string because
    it is derived from basestring but cannot be decoded, and raises
    ValueError: No JSON object could be decoded.
    """

    # Used so to_python() is called
    __metaclass__ = models.SubfieldBase
    description = _("JSON object")

    def __init__(self, *args, **kwargs):
        super(JSONField, self).__init__(*args, **kwargs)
        self.validators.append(validators.MaxLengthValidator(self.max_length))

    @classmethod
    def get_internal_type(cls):
        return "TextField"

    @classmethod
    def to_python(cls, value):
        """Convert our string value to JSON after we load it from the DB"""
        if value == "":
            return None

        try:
            if isinstance(value, basestring):
                return json.loads(value)
        except ValueError:
            return value

        return value

    def get_prep_value(self, value):
        """Convert our JSON object to a string before we save"""

        if not value or value == "":
            return None

        db_value = json.dumps(value)
        return super(JSONField, self).get_prep_value(db_value)

    def get_db_prep_value(self, value, connection, prepared=False):
        """Convert our JSON object to a string before we save"""

        if not value or value == "":
            return None

        db_value = json.dumps(value)
        return super(JSONField, self).get_db_prep_value(db_value, connection, prepared)
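
    # Usage sketch (illustrative value, not taken from real metadata): storing a dict
    # such as {'en': 'Example IdP'} in a JSONField goes through get_prep_value() /
    # json.dumps() and is persisted as the text '{"en": "Example IdP"}'; reading it
    # back goes through to_python() / json.loads(), which returns the original dict.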


class Base(models.Model):
    file_url = models.CharField(verbose_name='Metadata url',
                                max_length=1000,
                                blank=True, null=True,
                                help_text=_(u'Url to fetch metadata file'))
    file = models.FileField(upload_to='metadata', blank=True, null=True,
                            verbose_name=_(u'metadata xml file'),
                            help_text=_("if url is set, the metadata will be "
                                        "fetched from it and replace the file value"))
    file_id = models.CharField(blank=True, null=True, max_length=500,
                               verbose_name=_(u'File ID'))

    registration_authority = models.CharField(verbose_name=_('Registration Authority'),
                                              max_length=200, blank=True, null=True)

    editor_users = models.ManyToManyField(User, null=True, blank=True,
                                          verbose_name=_('editor users'))

    class Meta(object):
        abstract = True

    class XmlError(Exception):
        pass

    def __unicode__(self):
        return self.url or u"Metadata %s" % self.id

    def load_file(self):
        if not hasattr(self, '_loaded_file'):
            # Only load the file and parse it, don't create/update any objects
            if not self.file:
                return None
            self._loaded_file = MetadataParser(filename=self.file.path)
        return self._loaded_file

    def _get_metadata_stream(self, load_streams):
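        # Build and run a pyFF pipeline of the form [{'load': [...]}, {'select': [...]}]:
        # each source in load_streams is loaded under an id derived from this object's
        # slug, sources flagged as 'SP' or 'IDP' are narrowed to the matching
        # md:EntityDescriptor elements, and the selected aggregate is serialized back to XML.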
        try:
            load = []
            select = []

            count = 1
            for stream in load_streams:
                curid = "%s%d" % (self.slug, count)
                load.append("%s as %s" % (stream[0], curid))
                if stream[1] == 'SP' or stream[1] == 'IDP':
                    select.append("%s!//md:EntityDescriptor[md:%sSSODescriptor]" % (curid, stream[1]))
                else:
                    select.append("%s" % curid)
                count = count + 1

            if len(select) > 0:
                pipeline = [{'load': load}, {'select': select}]
            else:
                pipeline = [{'load': load}, 'select']

            md = MDRepository()
            entities = Plumbing(pipeline=pipeline, id=self.slug).process(md, state={'batch': True, 'stats': {}})
            return etree.tostring(entities)
        except Exception, e:
            raise Exception('Getting metadata from %s failed.\nError: %s' % (load_streams, e))

    def fetch_metadata_file(self, file_name):
        file_url = self.file_url
        if not file_url or file_url == '':
            return
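
        # file_url may list several sources separated by '|'; each source can carry an
        # entity-type hint after ';' ('SP' or 'IDP'), and defaults to 'All' when no hint
        # is given.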
        metadata_files = []
        files = file_url.split("|")
        for curfile in files:
            cursource = curfile.split(";")
            if len(cursource) == 1:
                cursource.append("All")
            metadata_files.append(cursource)

        req = self._get_metadata_stream(metadata_files)

        try:
            self.file.seek(0)
            original_file_content = self.file.read()
            if compare_filecontents(original_file_content, req):
                return False
        except Exception:
            pass

        filename = path.basename("%s-metadata.xml" % file_name)
        self.file.delete(save=False)
        self.file.save(filename, ContentFile(req), save=False)
        return True

    @classmethod
    def process_metadata(cls):
        raise NotImplementedError()


class XmlDescriptionError(Exception):
    pass


class Federation(Base):
    name = models.CharField(blank=False, null=False, max_length=200,
                            unique=True, verbose_name=_(u'Name'))

    type = models.CharField(blank=True, null=True, max_length=100,
                            unique=False, verbose_name=_(u'Type'), choices=FEDERATION_TYPES)

    url = models.URLField(verbose_name='Federation url',
                          blank=True, null=True)

    fee_schedule_url = models.URLField(verbose_name='Fee schedule url',
                                       max_length=150, blank=True, null=True)

    logo = models.ImageField(upload_to='federation_logo', blank=True,
                             null=True, verbose_name=_(u'Federation logo'))
    is_interfederation = models.BooleanField(default=False, db_index=True,
                                             verbose_name=_(u'Is interfederation'))
    slug = models.SlugField(max_length=200, unique=True)

    country = models.CharField(blank=True, null=True, max_length=100,
                               unique=False, verbose_name=_(u'Country'))

    metadata_update = models.DateField(blank=True, null=True,
                                       unique=False, verbose_name=_(u'Metadata update date'))

    @property
    def _metadata(self):
        if not hasattr(self, '_metadata_cache'):
            self._metadata_cache = self.load_file()
        return self._metadata_cache

    def __unicode__(self):
        return self.name

    def get_entity_metadata(self, entityid):
        return self._metadata.get_entity(entityid)

    def get_entity(self, entityid):
        return self.entity_set.get(entityid=entityid)

    def process_metadata(self):
        metadata = self.load_file()

        if not metadata:
            return

        if self.file_id and metadata.file_id and metadata.file_id == self.file_id:
            return
        else:
            self.file_id = metadata.file_id

        if not metadata.is_federation:
            raise XmlDescriptionError("XML does not describe a federation")

        update_obj(metadata.get_federation(), self)

    def _remove_deleted_entities(self, entities_from_xml, request):
        entities_to_remove = []
        for entity in self.entity_set.all():
            # Remove the entity relation if it no longer exists in the metadata
            if entity.entityid not in entities_from_xml:
                entities_to_remove.append(entity)

        if len(entities_to_remove) > 0:
            self.entity_set.remove(*entities_to_remove)

        if request:
            for entity in entities_to_remove:
                if not entity.federations.exists():
                    messages.warning(request,
                                     mark_safe(_("Orphan entity: <a href='%s'>%s</a>" %
                                                 (entity.get_absolute_url(), entity.entityid))))

        return len(entities_to_remove)

    def _update_entities(self, entities_to_update, entities_to_add):
        for e in entities_to_update:
            e.save()

        self.entity_set.add(*entities_to_add)

    @staticmethod
    def _entity_has_changed(entity, entityid, name, registration_authority):
        if entity.entityid != entityid:
            return True
        if entity.name != name:
            return True
        if entity.registration_authority != registration_authority:
            return True

        return False

    def _add_new_entities(self, entities, entities_from_xml, request, federation_slug):
        db_entity_types = EntityType.objects.all()
        cached_entity_types = {entity_type.xmlname: entity_type for entity_type in db_entity_types}

        entities_to_add = []
        entities_to_update = []

        for m_id in entities_from_xml:
            if request and federation_slug:
                request.session['%s_cur_entities' % federation_slug] += 1
                request.session.save()

            created = False
            if m_id in entities:
                entity = entities[m_id]
            else:
                entity, created = Entity.objects.get_or_create(entityid=m_id)

            entityid = entity.entityid
            name = entity.name
            registration_authority = entity.registration_authority

            entity_from_xml = self._metadata.get_entity(m_id, True)
            entity.process_metadata(False, entity_from_xml, cached_entity_types)

            if created or self._entity_has_changed(entity, entityid, name, registration_authority):
                entities_to_update.append(entity)

            entities_to_add.append(entity)

        self._update_entities(entities_to_update, entities_to_add)
        return len(entities_to_update)

    @staticmethod
    def _daterange(start_date, end_date):
        for n in range(int((end_date - start_date).days + 1)):
            yield start_date + timedelta(n)

    def compute_new_stats(self):
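        # Recompute daily statistics: for every day from the most recent EntityStat
        # record (or from 2010-01-01 if none exists) up to now, call the matching
        # get_<feature>() method for each configured feature, replace that day's
        # EntityStat rows, and report which features could and could not be computed.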
        entities_from_xml = self._metadata.get_entities()

        entities = Entity.objects.filter(entityid__in=entities_from_xml)
        entities = entities.prefetch_related('types')

        try:
            first_date = EntityStat.objects.filter(federation=self).aggregate(Max('time'))['time__max']
            if not first_date:
                raise Exception('Not able to find statistical data in the DB.')
        except Exception:
            first_date = datetime(2010, 1, 1)
            first_date = pytz.utc.localize(first_date)

        for curtimestamp in self._daterange(first_date, timezone.now()):
            computed = {}
            not_computed = []
            entity_stats = []
            for feature in stats['features'].keys():
                fun = getattr(self, 'get_%s' % feature, None)

                if callable(fun):
                    stat = EntityStat()
                    stat.feature = feature
                    stat.time = curtimestamp
                    stat.federation = self
                    stat.value = fun(entities, stats['features'][feature], curtimestamp)
                    entity_stats.append(stat)
                    computed[feature] = stat.value
                else:
                    not_computed.append(feature)

            from_time = datetime.combine(curtimestamp, time.min)
            if timezone.is_naive(from_time):
                from_time = pytz.utc.localize(from_time)
            to_time = datetime.combine(curtimestamp, time.max)
            if timezone.is_naive(to_time):
                to_time = pytz.utc.localize(to_time)

            EntityStat.objects.filter(federation=self, time__gte=from_time, time__lte=to_time).delete()
            EntityStat.objects.bulk_create(entity_stats)

        return (computed, not_computed)

    def process_metadata_entities(self, request=None, federation_slug=None):
        entities_from_xml = self._metadata.get_entities()
        removed = self._remove_deleted_entities(entities_from_xml, request)

        entities = {}
        db_entities = Entity.objects.filter(entityid__in=entities_from_xml)
        db_entities = db_entities.prefetch_related('types')

        for entity in db_entities.all():
            entities[entity.entityid] = entity

        if request and federation_slug:
            request.session['%s_num_entities' % federation_slug] = len(entities_from_xml)
            request.session['%s_cur_entities' % federation_slug] = 0
            request.session['%s_process_done' % federation_slug] = False
            request.session.save()

        updated = self._add_new_entities(entities, entities_from_xml, request, federation_slug)

        if request and federation_slug:
            request.session['%s_process_done' % federation_slug] = True
            request.session.save()

        return removed, updated

    def get_absolute_url(self):
        return reverse('federation_view', args=[self.slug])

    @classmethod
    def get_sp(cls, entities, xml_name, ref_date=None):
        count = 0
        for entity in entities:
            reginst = None
            if entity.registration_instant:
                reginst = pytz.utc.localize(entity.registration_instant)
            if not ref_date or (reginst and reginst > ref_date):
                continue
            cur_cached_types = [t.xmlname for t in entity.types.all()]
            if xml_name in cur_cached_types:
                count += 1
        return count

    @classmethod
    def get_idp(cls, entities, xml_name, ref_date=None):
        count = 0
        for entity in entities:
            reginst = None
            if entity.registration_instant:
                reginst = pytz.utc.localize(entity.registration_instant)
            if not ref_date or (reginst and reginst > ref_date):
                continue
            cur_cached_types = [t.xmlname for t in entity.types.all()]
            if xml_name in cur_cached_types:
                count += 1
        return count

    def get_sp_saml1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_sp_saml2(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_sp_shib1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'SPSSODescriptor', ref_date)

    def get_idp_saml1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    def get_idp_saml2(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    def get_idp_shib1(self, entities, xml_name, ref_date=None):
        return self.get_stat_protocol(entities, xml_name, 'IDPSSODescriptor', ref_date)

    def get_stat_protocol(self, entities, xml_name, service_type, ref_date):
        count = 0
        for entity in entities:
            reginst = None
            if entity.registration_instant:
                reginst = pytz.utc.localize(entity.registration_instant)
            if not ref_date or (reginst and reginst > ref_date):
                continue

            try:
                cur_cached_types = [t.xmlname for t in entity.types.all()]
                if service_type in cur_cached_types and Entity.READABLE_PROTOCOLS[xml_name] in entity.display_protocols:
                    count += 1
            except Exception:
                pass
        return count

    def can_edit(self, user, delete):
        if user.is_superuser:
            return True

        permission = 'delete_federation' if delete else 'change_federation'
        if user.has_perm('metadataparser.%s' % permission) and user in self.editor_users.all():
            return True
        return False


class EntityQuerySet(QuerySet):
    def iterator(self):
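        # Entities without their own metadata file are resolved against the federations
        # they belong to: Federation objects are cached by id, and the first federation
        # whose metadata file actually contains the entity is used.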
        cached_federations = {}
        for entity in super(EntityQuerySet, self).iterator():
            if entity.file:
                continue

            federations = entity.federations.all()
            if federations:
                federation = federations[0]
            else:
                raise ValueError("Can't find entity metadata")

            for federation in federations:
                if federation.id not in cached_federations:
                    cached_federations[federation.id] = federation

                cached_federation = cached_federations[federation.id]
                try:
                    entity.load_metadata(federation=cached_federation)
                except ValueError:
                    # The entity belongs to the federation but is missing from its metadata file
                    continue
                else:
                    break

            yield entity


class EntityManager(models.Manager):
    def get_queryset(self):
        return EntityQuerySet(self.model, using=self._db)


class EntityType(models.Model):
    name = models.CharField(blank=False, max_length=20, unique=True,
                            verbose_name=_(u'Name'), db_index=True)
    xmlname = models.CharField(blank=False, max_length=20, unique=True,
                               verbose_name=_(u'Name in XML'), db_index=True)

    def __unicode__(self):
        return self.name


class Entity(Base):
    READABLE_PROTOCOLS = {
        'urn:oasis:names:tc:SAML:1.1:protocol': 'SAML 1.1',
        'urn:oasis:names:tc:SAML:2.0:protocol': 'SAML 2.0',
        'urn:mace:shibboleth:1.0': 'Shibboleth 1.0',
    }

    entityid = models.CharField(blank=False, max_length=200, unique=True,
                                verbose_name=_(u'EntityID'), db_index=True)
    federations = models.ManyToManyField(Federation,
                                         verbose_name=_(u'Federations'))

    types = models.ManyToManyField(EntityType, verbose_name=_(u'Type'))

    name = JSONField(blank=True, null=True, max_length=2000,
                     verbose_name=_(u'Display Name'))

    objects = models.Manager()
    longlist = EntityManager()

    curfed = None

    @property
    def registration_authority_xml(self):
        return self._get_property('registration_authority')

    @property
    def registration_policy(self):
        return self._get_property('registration_policy')

    @property
    def registration_instant(self):
        reginstant = self._get_property('registration_instant')
        if reginstant is None:
            return None
        reginstant = "%sZ" % reginstant[0:19]
        return datetime.strptime(reginstant, '%Y-%m-%dT%H:%M:%SZ')

    @property
    def protocols(self):
        return ' '.join(self._get_property('protocols'))

    @property
    def languages(self):
        return ' '.join(self._get_property('languages'))

    @property
    def scopes(self):
        return ' '.join(self._get_property('scopes'))

    @property
    def attributes(self):
        attributes = self._get_property('attr_requested')
        if not attributes:
            return []
        return attributes['required']

    @property
    def attributes_optional(self):
        attributes = self._get_property('attr_requested')
        if not attributes:
            return []
        return attributes['optional']

    @property
    def organization(self):
        organization = self._get_property('organization')
        if not organization:
            return []

        vals = []
        for lang, data in organization.items():
            data['lang'] = lang
            vals.append(data)

        return vals

    @property
    def display_name(self):
        return self._get_property('displayName')

    @property
    def federations_count(self):
        return str(self.federations.all().count())

    @property
    def description(self):
        return self._get_property('description')

    @property
    def info_url(self):
        return self._get_property('infoUrl')

    @property
    def privacy_url(self):
        return self._get_property('privacyUrl')

    @property
    def xml(self):
        return self._get_property('xml')

    @property
    def xml_types(self):
        return self._get_property('entity_types')

    @property
    def display_protocols(self):
        protocols = []

        xml_protocols = self._get_property('protocols')
        if xml_protocols:
            for proto in xml_protocols:
                protocols.append(self.READABLE_PROTOCOLS.get(proto, proto))

        return protocols

    def display_attributes(self):
        attributes = {}
        for [attr, friendly] in self.attributes:
            if friendly:
                attributes[attr] = friendly
            elif attr in attributemap.MAP['fro']:
                attributes[attr] = attributemap.MAP['fro'][attr]
            else:
                attributes[attr] = '?'
        return attributes

    def display_attributes_optional(self):
        attributes = {}
        for [attr, friendly] in self.attributes_optional:
            if friendly:
                attributes[attr] = friendly
            elif attr in attributemap.MAP['fro']:
                attributes[attr] = attributemap.MAP['fro'][attr]
            else:
                attributes[attr] = '?'
        return attributes

    @property
    def contacts(self):
        contacts = []
        for cur_contact in self._get_property('contacts'):
            if cur_contact['name'] and cur_contact['surname']:
                contact_name = '%s %s' % (cur_contact['name'], cur_contact['surname'])
            elif cur_contact['name']:
                contact_name = cur_contact['name']
            elif cur_contact['surname']:
                contact_name = cur_contact['surname']
            else:
                contact_name = urlparse(cur_contact['email']).path.partition('?')[0]
            c_type = 'undefined'
            if cur_contact['type']:
                c_type = cur_contact['type']
            contacts.append({'name': contact_name, 'email': cur_contact['email'], 'type': c_type})
        return contacts

    @property
    def logos(self):
        logos = []
        for cur_logo in self._get_property('logos'):
            cur_logo['external'] = True
            logos.append(cur_logo)

        return logos

    class Meta(object):
        verbose_name = _(u'Entity')
        verbose_name_plural = _(u'Entities')

    def __unicode__(self):
        return self.entityid

    def load_metadata(self, federation=None, entity_data=None):
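        # Resolution order: an already cached copy, the entity's own metadata file, an
        # explicitly passed federation or entity_data dict, and finally the entity's
        # federations (preferring the one whose registration authority matches).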
        if hasattr(self, '_entity_cached'):
            return

        if self.file:
            self._entity_cached = self.load_file().get_entity(self.entityid)
        elif federation:
            self._entity_cached = federation.get_entity_metadata(self.entityid)
        elif entity_data:
            self._entity_cached = entity_data
        else:
            right_fed = None
            first_fed = None
            for fed in self.federations.all():
                if fed.registration_authority == self.registration_authority:
                    right_fed = fed
                if first_fed is None:
                    first_fed = fed

            if right_fed is not None:
                entity_cached = right_fed.get_entity_metadata(self.entityid)
                self._entity_cached = entity_cached
            else:
                entity_cached = first_fed.get_entity_metadata(self.entityid)
                self._entity_cached = entity_cached

        if not hasattr(self, '_entity_cached'):
            raise ValueError("Can't find entity metadata")

    def _get_property(self, prop, federation=None):
        try:
            self.load_metadata(federation or self.curfed)
        except ValueError:
            return None

        if hasattr(self, '_entity_cached'):
            return self._entity_cached.get(prop, None)
        else:
            raise ValueError("No metadata loaded")

    def _get_or_create_etypes(self, cached_entity_types):
        entity_types = []
        cur_cached_types = [t.xmlname for t in self.types.all()]
        for etype in self.xml_types:
            if etype in cur_cached_types:
                break

            if cached_entity_types is None:
                entity_type, _ = EntityType.objects.get_or_create(xmlname=etype,
                                                                  name=DESCRIPTOR_TYPES_DISPLAY[etype])
            else:
                if etype in cached_entity_types:
                    entity_type = cached_entity_types[etype]
                else:
                    entity_type = EntityType.objects.create(xmlname=etype,
                                                            name=DESCRIPTOR_TYPES_DISPLAY[etype])
            entity_types.append(entity_type)
        return entity_types

    def process_metadata(self, auto_save=True, entity_data=None, cached_entity_types=None):
        if not entity_data:
            self.load_metadata()
            # Fall back to the metadata loaded for this entity when no explicit data is given
            entity_data = self._entity_cached

        if self.entityid.lower() != entity_data.get('entityid').lower():
            raise ValueError("EntityID is not the same: %s != %s" % (self.entityid.lower(), entity_data.get('entityid').lower()))

        self._entity_cached = entity_data
        if self.xml_types:
            entity_types = self._get_or_create_etypes(cached_entity_types)

            if len(entity_types) > 0:
                self.types.add(*entity_types)

        newname = self._get_property('displayName')
        if newname and newname != '':
            self.name = newname

        if str(self._get_property('registration_authority')) != '':
            self.registration_authority = self._get_property('registration_authority')

        if auto_save:
            self.save()

    def to_dict(self):
        self.load_metadata()

        entity = self._entity_cached.copy()
        entity["types"] = [unicode(f) for f in self.types.all()]
        entity["federations"] = [{u"name": unicode(f), u"url": f.get_absolute_url()}
                                 for f in self.federations.all()]

        if self.registration_authority:
            entity["registration_authority"] = self.registration_authority
        if self.registration_instant:
            entity["registration_instant"] = '%s' % self.registration_instant

        if "file_id" in entity.keys():
            del entity["file_id"]
        if "entity_types" in entity.keys():
            del entity["entity_types"]

        return entity

    def display_etype(value, separator=', '):
        return separator.join([unicode(item) for item in value.all()])

    @classmethod
    def get_most_federated_entities(cls, maxlength=TOP_LENGTH, cache_expire=None):
        entities = None
        if cache_expire:
            entities = cache.get("most_federated_entities")

        if not entities or len(entities) < maxlength:
            # Annotate each entity with the number of federations it belongs to,
            # sorted with the most federated entities first
            ob_entities = Entity.objects.all().annotate(federationslength=Count("federations")).order_by("-federationslength")
            ob_entities = ob_entities.prefetch_related('types', 'federations')
            ob_entities = ob_entities[:maxlength]

            entities = []
            for entity in ob_entities:
                entities.append({
                    'entityid': entity.entityid,
                    'name': entity.name,
                    'absolute_url': entity.get_absolute_url(),
                    'types': [unicode(item) for item in entity.types.all()],
                    'federations': [(unicode(item.name), item.get_absolute_url()) for item in entity.federations.all()],
                })

            if cache_expire:
                cache.set("most_federated_entities", entities, cache_expire)

        return entities[:maxlength]

    def get_absolute_url(self):
        return reverse('entity_view', args=[quote_plus(self.entityid)])

    def can_edit(self, user, delete):
        permission = 'delete_entity' if delete else 'change_entity'
        if user.is_superuser or (user.has_perm('metadataparser.%s' % permission) and user in self.editor_users.all()):
            return True

        for federation in self.federations.all():
            if federation.can_edit(user, False):
                return True

        return False


class EntityStat(models.Model):
    time = models.DateTimeField(blank=False, null=False,
                                verbose_name=_(u'Metadata time stamp'))
    feature = models.CharField(max_length=100, blank=False, null=False, db_index=True,
                               verbose_name=_(u'Feature name'))

    value = models.PositiveIntegerField(max_length=100, blank=False, null=False,
                                        verbose_name=_(u'Feature value'))

    federation = models.ForeignKey(Federation, blank=False,
                                   verbose_name=_(u'Federations'))

    def __unicode__(self):
        return self.feature


class Dummy(models.Model):
    pass


@receiver(pre_save, sender=Federation, dispatch_uid='federation_pre_save')
def federation_pre_save(sender, instance, **kwargs):
    # Skip pre_save if only file name is saved
    if 'update_fields' in kwargs and kwargs['update_fields'] == set(['file']):
        return

    slug = slugify(unicode(instance.name))[:200]
    if instance.file_url and instance.file_url != '':
        instance.fetch_metadata_file(slug)
    if instance.name:
        instance.slug = slugify(unicode(instance))[:200]


@receiver(pre_save, sender=Entity, dispatch_uid='entity_pre_save')
def entity_pre_save(sender, instance, **kwargs):
    if instance.file_url:
        slug = slugify(unicode(instance.name))[:200]
        instance.fetch_metadata_file(slug)
        instance.process_metadata()