# coding: utf8

"""
This software is licensed under the Apache 2 license, quoted below.

Copyright 2014 Crystalnix Limited

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
"""
import time
import logging
import uuid

from django.template import defaultfilters as filters

from omaha_server.celery import app
from omaha_server.utils import add_extra_to_log_message, get_splunk_url

from omaha import statistics
from omaha.parser import parse_request
from omaha.limitation import (
    delete_older_than,
    delete_size_is_exceeded,
    delete_duplicate_crashes,
    monitoring_size,
    raven,
)

logger = logging.getLogger(__name__)


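# Fire-and-forget task: parse a raw Omaha request body and record the usage
# statistics it carries; 'ip' is the client address, when known.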
@app.task(ignore_result=True)
def collect_statistics(request, ip=None):
    statistics.collect_statistics(parse_request(request), ip=ip)


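# Periodic task: delete Crash and Feedback records that are older than the
# configured retention limit (delegated to omaha.limitation.delete_older_than).
# Each cleaned batch is tagged with a shared log_id that goes into the Sentry
# (raven) message and into every per-element log line, so the whole batch can
# be pulled up with a single Splunk filter. Note the registered task name
# keeps the historical 'older_then' spelling.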
@app.task(name='tasks.auto_delete_older_then', ignore_result=True)
def auto_delete_older_than():
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_older_than(*model)
        if result.get('count', 0):
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            ids_list = sorted([element['id'] for element in result['elements']])
            raven_extra = {"id": log_id, "splunk_url": splunk_url,
                           "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
            raven.captureMessage("[Limitation]Periodic task 'Older than' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                                 (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'],
                         size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '),
                         model=model[1], reason='old')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


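# Periodic task: same reporting pattern as auto_delete_older_than, but records
# are deleted because total stored size exceeds its configured limit, not
# because of their age (reason='size_is_exceeded').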
@app.task(name='tasks.auto_delete_size_is_exceeded', ignore_result=True)
def auto_delete_size_is_exceeded():
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback')
    ]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            ids_list = sorted([element['id'] for element in result['elements']])
            raven_extra = {"id": log_id, "splunk_url": splunk_url,
                           "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
            raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                                 (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'],
                         size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '),
                         model=model[1], reason='size_is_exceeded')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


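# Periodic task: drop duplicate crash reports. Crash-only, so there is no
# model loop; cleaned records are reported under reason='duplicated'.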
@app.task(name='tasks.auto_delete_duplicate_crashes', ignore_result=True)
def auto_delete_duplicate_crashes():
    logger = logging.getLogger('limitation')
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        log_id = str(uuid.uuid4())
        params = dict(log_id=log_id)
        splunk_url = get_splunk_url(params)
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        ids_list = sorted([element['id'] for element in result['elements']])
        raven_extra = {"id": log_id, "splunk_url": splunk_url,
                       "splunk_filter": splunk_filter, "crash_list": ids_list}
        raven.captureMessage("[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %s [%d]" %
                             (result['count'], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                             data=dict(level=20, logger='limitation'), extra=raven_extra)
        extra = dict(log_id=log_id, meta=True, count=result['count'],
                     size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '),
                     reason='duplicated', model='Crash')
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update({"log_id": log_id, "Crash_id": element.pop('id')})
            logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


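# On-demand cleanup, dispatched manually rather than by the beat schedule.
# Applies up to three strategies (duplicates, age, total size) against the
# given model, accumulates one combined result, and logs it with
# reason='manual'. Unlike the periodic tasks, the result is not discarded
# (no ignore_result=True), so the caller can inspect it if needed.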
@app.task(name='tasks.deferred_manual_cleanup')
def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
    logger = logging.getLogger('limitation')
    full_result = dict(count=0, size=0, elements=[])
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    log_id = str(uuid.uuid4())
    params = dict(log_id=log_id)
    splunk_url = get_splunk_url(params)
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    ids_list = sorted([element['id'] for element in full_result['elements']])
    raven_extra = {"id": log_id, "splunk_url": splunk_url,
                   "splunk_filter": splunk_filter, "%s_list" % (model[1]): ids_list}
    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %s [%s]" %
                         (full_result['count'], model[1], filters.filesizeformat(full_result['size']).replace(u'\xa0', u' '), log_id),
                         data=dict(level=20, logger='limitation'), extra=raven_extra)

    extra = dict(log_id=log_id, meta=True, count=full_result['count'],
                 size=filters.filesizeformat(full_result['size']).replace(u'\xa0', u' '),
                 model=model[1], limit_duplicated=limit_duplicated, limit_size=limit_size,
                 limit_days=limit_days, reason='manual')
    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
    for element in full_result['elements']:
        element.update({"log_id": log_id, "%s_id" % (model[1]): element.pop('id')})
        logger.info(add_extra_to_log_message('Manual cleanup element', extra=element))


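# Periodic task: thin wrapper that lets omaha.limitation.monitoring_size run
# on the Celery beat schedule.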
@app.task(name='tasks.auto_monitoring_size', ignore_result=True)
def auto_monitoring_size():
    monitoring_size()
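
# The periodic tasks above are meant to be driven by Celery beat. A minimal
# illustrative schedule is sketched below: the task names match the decorators
# in this module, but the intervals are assumptions, and such a dict belongs
# in the Django/Celery settings rather than in this file.
#
#     from datetime import timedelta
#
#     CELERYBEAT_SCHEDULE = {
#         'auto-delete-older-than': {
#             'task': 'tasks.auto_delete_older_then',
#             'schedule': timedelta(hours=1),
#         },
#         'auto-delete-size-is-exceeded': {
#             'task': 'tasks.auto_delete_size_is_exceeded',
#             'schedule': timedelta(hours=1),
#         },
#         'auto-delete-duplicate-crashes': {
#             'task': 'tasks.auto_delete_duplicate_crashes',
#             'schedule': timedelta(hours=1),
#         },
#         'auto-monitoring-size': {
#             'task': 'tasks.auto_monitoring_size',
#             'schedule': timedelta(minutes=5),
#         },
#     }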