Completed
Push — master (7f6883...3d6838) by Egor, created 02:22

collect_statistics()   A

Complexity: Conditions 1
Size: Total Lines 3
Duplication: Lines 0, Ratio 0 %

Metric   Value
dl       0
loc      3
rs       10
cc       1
# coding: utf8

"""
This software is licensed under the Apache 2 license, quoted below.

Copyright 2014 Crystalnix Limited

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
"""
import time
import logging
import uuid

from omaha_server.celery import app
from omaha_server.utils import add_extra_to_log_message, get_splunk_url
from omaha import statistics
from omaha.parser import parse_request
from omaha.limitation import (
    delete_older_than,
    delete_size_is_exceeded,
    delete_duplicate_crashes,
    monitoring_size,
    raven,
)

logger = logging.getLogger(__name__)


@app.task(ignore_result=True)
def collect_statistics(request, ip=None):
    statistics.collect_statistics(parse_request(request), ip=ip)
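
For context, an @app.task like collect_statistics is normally enqueued through Celery rather than called inline. A minimal sketch of a call site, assuming this module imports as omaha.tasks and that the raw request body is what parse_request expects (the view name and variables are illustrative, not from this file):

# Hypothetical call site: hand the payload off to a worker so the HTTP
# response does not block on statistics processing.
from omaha.tasks import collect_statistics

def handle_update_request(request_body, client_ip):
    # .delay(...) is Celery shorthand for .apply_async(args=..., kwargs=...)
    collect_statistics.delay(request_body, ip=client_ip)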

Review note: this code seems to be duplicated in your project (the same reporting pattern appears in auto_delete_size_is_exceeded and auto_delete_duplicate_crashes below). Duplicated code is one of the most pungent code smells; if you need to duplicate the same code in three or more different places, we strongly encourage you to look into extracting the code into a single class or operation. A refactoring sketch follows the listing.

# NB: the registered task name spells 'older_then' while the function is
# named 'auto_delete_older_than'.
@app.task(name='tasks.auto_delete_older_then', ignore_result=True)
38
def auto_delete_older_than():
39
    logger = logging.getLogger('limitation')
40
    model_list = [
41
        ('crash', 'Crash'),
42
        ('feedback', 'Feedback')
43
    ]
44
    for model in model_list:
45
        result = delete_older_than(*model)
46
        if result.get('count', 0):
47
            result['size'] /= 1024.0 * 1024
48
            log_id = str(uuid.uuid4())
49
            params = dict(log_id=log_id)
50
            splunk_url = get_splunk_url(params)
51
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
52
            raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
53
            raven.captureMessage("[Limitation]Periodic task 'Older than' cleaned up %d %s, total size of cleaned space is %.2f Mb[%d]" %
54
                                 (result['count'], model[1], result['size'], time.time()),
55
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
56
            extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'], model=model[1], reason='old')
57
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
58
            for element in result['elements']:
59
                element.update(dict(log_id=log_id))
60
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))
61
62
@app.task(name='tasks.auto_delete_size_is_exceeded', ignore_result=True)
def auto_delete_size_is_exceeded():
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            result['size'] /= 1024.0 * 1024  # bytes -> Mb
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
            raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %.2f Mb[%d]" %
                                 (result['count'], model[1], result['size'], time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'], model=model[1], reason='size_is_exceeded')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update(dict(log_id=log_id))
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.auto_delete_duplicate_crashes', ignore_result=True)
def auto_delete_duplicate_crashes():
    logger = logging.getLogger('limitation')
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        result['size'] /= 1024.0 * 1024  # bytes -> Mb
        log_id = str(uuid.uuid4())
        params = dict(log_id=log_id)
        splunk_url = get_splunk_url(params)
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
        raven.captureMessage("[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %.2f Mb[%d]" %
                             (result['count'], result['size'], time.time()),
                             data=dict(level=20, logger='limitation'), extra=raven_extra)
        extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'], reason='duplicated', model='Crash')
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update(dict(log_id=log_id))
            logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.deferred_manual_cleanup')
def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
    # model is an ('app_label', 'ModelName') pair, e.g. ('crash', 'Crash')
    logger = logging.getLogger('limitation')
    full_result = dict(count=0, size=0, elements=[])
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    full_result['size'] /= 1024.0 * 1024  # bytes -> Mb
    log_id = str(uuid.uuid4())
    params = dict(log_id=log_id)
    splunk_url = get_splunk_url(params)
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %.2f Mb[%s]" %
                         (full_result['count'], model[1], full_result['size'], log_id),
                         data=dict(level=20, logger='limitation'), extra=raven_extra)

    extra = dict(log_id=log_id, meta=True, count=full_result['count'], size=full_result['size'], model=model[1],
                 limit_duplicated=limit_duplicated, limit_size=limit_size, limit_days=limit_days, reason='manual')
    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
    for element in full_result['elements']:
        element.update(dict(log_id=log_id))
        logger.info(add_extra_to_log_message('Manual cleanup element', extra=element))
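
As a usage note, deferred_manual_cleanup takes the same model pair as the periodic tasks above. A hedged sketch of an invocation (the call site and limit values are illustrative; the units of limit_size and limit_days are defined by the corresponding omaha.limitation functions):

# Hypothetical invocation, assuming the module imports as omaha.tasks:
from omaha.tasks import deferred_manual_cleanup

deferred_manual_cleanup.delay(('crash', 'Crash'),
                              limit_size=1000,
                              limit_days=30,
                              limit_duplicated=10)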


@app.task(name='tasks.auto_monitoring_size', ignore_result=True)
def auto_monitoring_size():
    monitoring_size()
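
The duplication flagged in the review note can be addressed as suggested: the Splunk/raven/logger reporting block is essentially identical across the periodic cleanup tasks. A minimal refactoring sketch, assuming the helper lives in this same module (the name _report_cleanup is illustrative, not part of the project):

def _report_cleanup(result, model_name, reason, message):
    # Shared reporting for the periodic cleanup tasks: convert the size,
    # tag everything with one log_id, and emit to raven and the logger.
    if not result.get('count', 0):
        return
    logger = logging.getLogger('limitation')
    result['size'] /= 1024.0 * 1024  # bytes -> Mb
    log_id = str(uuid.uuid4())
    splunk_url = get_splunk_url(dict(log_id=log_id))
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    raven_extra = dict(id=log_id, splunk_url=splunk_url, splunk_filter=splunk_filter)
    raven.captureMessage(message % (result['count'], model_name, result['size'], time.time()),
                         data=dict(level=20, logger='limitation'), extra=raven_extra)
    extra = dict(log_id=log_id, meta=True, count=result['count'], size=result['size'],
                 model=model_name, reason=reason)
    logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
    for element in result['elements']:
        element.update(dict(log_id=log_id))
        logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.auto_delete_older_then', ignore_result=True)
def auto_delete_older_than():
    for model in [('crash', 'Crash'), ('feedback', 'Feedback')]:
        _report_cleanup(delete_older_than(*model), model[1], 'old',
                        "[Limitation]Periodic task 'Older than' cleaned up %d %s, "
                        "total size of cleaned space is %.2f Mb[%d]")

auto_delete_size_is_exceeded collapses the same way with reason='size_is_exceeded'; auto_delete_duplicate_crashes formats three values rather than four into its message, so it would need a slightly different signature or message handling.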