| Total Complexity | 7 |
| Total Lines | 26 |
| Duplicated Lines | 0 % |
| 1 | import hashlib |
||
class UrlCache(object):
    """Process-wide cache of versioned static-file URLs.

    Maps a relative static-file path to a URL of the form
    ``STATIC_URL + path + '?v=' + <urlsafe-b64 md5 digest>`` so browsers
    re-fetch the asset whenever its content changes.
    """

    # Cache: relative file path -> fully built versioned URL string.
    # (Name kept for backward compatibility even though it stores URLs,
    # not raw digests.)
    _md5_sum = {}

    # Serializes cache population so concurrent first requests for the
    # same file don't all hash it from disk.
    _counter_lock = threading.Lock()

    @classmethod
    def get_md5(cls, file):
        """Return the versioned static URL for *file*, caching on first use.

        The fast path is a lock-free dict read (safe under the GIL).  On a
        miss we take the lock and re-check before computing, so only one
        thread ever hashes a given file.
        """
        try:
            # Fast path: already cached.
            return cls._md5_sum[file]
        except KeyError:
            with cls._counter_lock:
                # Double-check under the lock: another thread may have
                # populated the entry while we were waiting.  Without this
                # re-check, every thread that missed recomputes the digest.
                try:
                    return cls._md5_sum[file]
                except KeyError:
                    md5 = base64.urlsafe_b64encode(cls.calc_md5(file))
                    value = '%s%s?v=%s' % (STATIC_URL, file, md5.decode('utf-8'))
                    cls._md5_sum[file] = value
                    return value

    @classmethod
    def calc_md5(cls, file):
        """Return the raw MD5 digest (bytes) of the static file *file*.

        *file* is resolved relative to ``settings.STATIC_ROOT``.  Raises
        ``OSError`` if the file cannot be opened.
        """
        full_path = join(settings.STATIC_ROOT, file)
        m = hashlib.md5()
        with open(full_path, 'rb') as fh:
            # Read in fixed-size chunks so large assets never need to be
            # held in memory whole.
            while True:
                data = fh.read(8192)
                if not data:
                    break
                m.update(data)
        return m.digest()
||
| 40 | |||
| 45 |