Code Duplication    Length = 24 lines in 2 locations

omaha_server/omaha/tasks.py (2 locations)

@@ 66-89 (lines=24) @@
63                 logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
64
65
66 @app.task(name='tasks.auto_delete_size_is_exceeded', ignore_result=True)
67 def auto_delete_size_is_exceeded():
68     logger = logging.getLogger('limitation')
69     model_list = [
70         ('crash', 'Crash'),
71         ('feedback', 'Feedback')
72     ]
73     for model in model_list:
74         result = delete_size_is_exceeded(*model)
75         if result.get('count', 0):
76             log_id = str(uuid.uuid4())
77             params = dict(log_id=log_id)
78             splunk_url = get_splunk_url(params)
79             splunk_filter = 'log_id=%s' % log_id if splunk_url else None
80             ids_list = sorted([element['id'] for element in result['elements']])
81             raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
82             raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %s [%d]" %
83                                  (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
84                                  data=dict(level=20, logger='limitation'), extra=raven_extra)
85             extra = dict(log_id=log_id, meta=True, count=result['count'], size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '), model=model[1], reason='size_is_exceeded')
86             logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
87             for element in result['elements']:
88                 element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
89                 logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
90
91
92 @app.task(name='tasks.auto_delete_duplicate_crashes', ignore_result=True)
@@ 40-63 (lines=24) @@
37     statistics.collect_statistics(parse_request(request), ip=ip)
38
39
40 @app.task(name='tasks.auto_delete_older_then', ignore_result=True)
41 def auto_delete_older_than():
42     logger = logging.getLogger('limitation')
43     model_list = [
44         ('crash', 'Crash'),
45         ('feedback', 'Feedback')
46     ]
47     for model in model_list:
48         result = delete_older_than(*model)
49         if result.get('count', 0):
50             log_id = str(uuid.uuid4())
51             params = dict(log_id=log_id)
52             splunk_url = get_splunk_url(params)
53             splunk_filter = 'log_id=%s' % log_id if splunk_url else None
54             ids_list = sorted([element['id'] for element in result['elements']])
55             raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
56             raven.captureMessage("[Limitation]Periodic task 'Older than' cleaned up %d %s, total size of cleaned space is %s [%d]" %
57                                  (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
58                                  data=dict(level=20, logger='limitation'), extra=raven_extra)
59             extra = dict(log_id=log_id, meta=True, count=result['count'], size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '), model=model[1], reason='old')
60             logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
61             for element in result['elements']:
62                 element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
63                 logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
64
65
66 @app.task(name='tasks.auto_delete_size_is_exceeded', ignore_result=True)
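
The two blocks differ only in the delete helper they call (delete_older_than vs delete_size_is_exceeded), the reason tag ('old' vs 'size_is_exceeded') and the label in the Sentry message. One way to remove the duplication is to hoist the shared body into a module-level helper and keep the Celery tasks as thin wrappers. The sketch below is illustrative only: the helper name _cleanup_and_report and its parameters are not part of tasks.py, and it assumes the names the module already uses (app, raven, filters, get_splunk_url, add_extra_to_log_message, delete_older_than, delete_size_is_exceeded).

# Sketch only: assumes tasks.py's existing imports and helpers
# (app, raven, filters, get_splunk_url, add_extra_to_log_message,
#  delete_older_than, delete_size_is_exceeded). The helper name and
# parameters below are hypothetical.
import logging
import time
import uuid


def _cleanup_and_report(delete_func, reason, task_label):
    # Shared body of the two periodic cleanup tasks; only the delete
    # function, the 'reason' tag and the Sentry message label differ.
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_func(*model)
        if not result.get('count', 0):
            continue
        log_id = str(uuid.uuid4())
        splunk_url = get_splunk_url(dict(log_id=log_id))
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        ids_list = sorted([element['id'] for element in result['elements']])
        size = filters.filesizeformat(result['size']).replace(u'\xa0', u' ')
        raven_extra = {"id": log_id, "splunk_url": splunk_url,
                       "splunk_filter": splunk_filter,
                       "%s_list" % (model[1]): ids_list}
        raven.captureMessage("[Limitation]Periodic task '%s' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                             (task_label, result['count'], model[1], size, time.time()),
                             data=dict(level=20, logger='limitation'), extra=raven_extra)
        extra = dict(log_id=log_id, meta=True, count=result['count'], size=size,
                     model=model[1], reason=reason)
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
            logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.auto_delete_older_then', ignore_result=True)
def auto_delete_older_than():
    _cleanup_and_report(delete_older_than, reason='old', task_label='Older than')


@app.task(name='tasks.auto_delete_size_is_exceeded', ignore_result=True)
def auto_delete_size_is_exceeded():
    _cleanup_and_report(delete_size_is_exceeded, reason='size_is_exceeded',
                        task_label='Size is exceeded')

Keeping the registered task names (including the existing 'auto_delete_older_then' spelling) and the log fields unchanged preserves the current Celery beat schedule and any Splunk/Sentry dashboards; only the duplicated body moves into the helper.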