# coding: utf8

"""
This software is licensed under the Apache 2 license, quoted below.

Copyright 2014 Crystalnix Limited

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
"""

import time
import logging
import uuid

import boto

from django.template import defaultfilters as filters

from omaha_server.celery import app
from omaha_server.utils import add_extra_to_log_message, get_splunk_url
from omaha import statistics
from omaha.parser import parse_request
from omaha.limitation import (
    delete_older_than,
    delete_size_is_exceeded,
    delete_duplicate_crashes,
    monitoring_size,
    raven,
    handle_dangling_files,
)
from omaha.models import Version
from sparkle.models import SparkleVersion
from crash.models import Crash, Symbols
from feedback.models import Feedback

logger = logging.getLogger(__name__)


@app.task(ignore_result=True)
def collect_statistics(request, ip=None):
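    """Parse a raw Omaha request and record its usage statistics.

    `ip` is the client address, if the caller knows it.
    """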
    statistics.collect_statistics(parse_request(request), ip=ip)


# NOTE: the registered task name keeps the historical 'older_then' spelling;
# renaming it would break any beat schedule that still references it.
@app.task(name='tasks.auto_delete_older_then', ignore_result=True)
def auto_delete_older_than():
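    """Periodic task: purge Crash and Feedback records older than the
    configured retention period, then report what was removed to Sentry
    (via raven) and to the 'limitation' logger.
    """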
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback'),
    ]
    for model in model_list:
        result = delete_older_than(*model)
        if result.get('count', 0):
            # A per-run UUID ties together the Sentry event, the Splunk link
            # and every per-element log line emitted below.
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            ids_list = sorted([element['id'] for element in result['elements']])
            raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter,
                           "%s_list" % model[1]: ids_list}
            # filesizeformat() separates value and unit with a non-breaking
            # space (u'\xa0'); replace it so the message stays plain ASCII.
            raven.captureMessage("[Limitation]Periodic task 'Older than' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                                 (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'],
                         size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '),
                         model=model[1], reason='old')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update({"log_id": log_id, "%s_id" % model[1]: element.pop('id')})
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.auto_delete_size_is_exceeded', ignore_result=True)
def auto_delete_size_is_exceeded():
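    """Periodic task: delete the oldest Crash and Feedback records when
    their total stored size exceeds the configured limit, then report the
    cleanup the same way as `auto_delete_older_than`.
    """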
    logger = logging.getLogger('limitation')
    model_list = [
        ('crash', 'Crash'),
        ('feedback', 'Feedback'),
    ]
    for model in model_list:
        result = delete_size_is_exceeded(*model)
        if result.get('count', 0):
            log_id = str(uuid.uuid4())
            params = dict(log_id=log_id)
            splunk_url = get_splunk_url(params)
            splunk_filter = 'log_id=%s' % log_id if splunk_url else None
            ids_list = sorted([element['id'] for element in result['elements']])
            raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter,
                           "%s_list" % model[1]: ids_list}
            raven.captureMessage("[Limitation]Periodic task 'Size is exceeded' cleaned up %d %s, total size of cleaned space is %s [%d]" %
                                 (result['count'], model[1], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                                 data=dict(level=20, logger='limitation'), extra=raven_extra)
            extra = dict(log_id=log_id, meta=True, count=result['count'],
                         size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '),
                         model=model[1], reason='size_is_exceeded')
            logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
            for element in result['elements']:
                element.update({"log_id": log_id, "%s_id" % model[1]: element.pop('id')})
                logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.auto_delete_duplicate_crashes', ignore_result=True)
def auto_delete_duplicate_crashes():
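    """Periodic task: remove duplicated crash reports via
    `delete_duplicate_crashes`, then report the cleanup to Sentry and the
    'limitation' logger.
    """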
    logger = logging.getLogger('limitation')
    result = delete_duplicate_crashes()
    if result.get('count', 0):
        log_id = str(uuid.uuid4())
        params = dict(log_id=log_id)
        splunk_url = get_splunk_url(params)
        splunk_filter = 'log_id=%s' % log_id if splunk_url else None
        ids_list = sorted([element['id'] for element in result['elements']])
        raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter,
                       "crash_list": ids_list}
        raven.captureMessage("[Limitation]Periodic task 'Duplicated' cleaned up %d crashes, total size of cleaned space is %s [%d]" %
                             (result['count'], filters.filesizeformat(result['size']).replace(u'\xa0', u' '), time.time()),
                             data=dict(level=20, logger='limitation'), extra=raven_extra)
        extra = dict(log_id=log_id, meta=True, count=result['count'],
                     size=filters.filesizeformat(result['size']).replace(u'\xa0', u' '),
                     reason='duplicated', model='Crash')
        logger.info(add_extra_to_log_message('Automatic cleanup', extra=extra))
        for element in result['elements']:
            element.update({"log_id": log_id, "Crash_id": element.pop('id')})
            logger.info(add_extra_to_log_message('Automatic cleanup element', extra=element))


@app.task(name='tasks.deferred_manual_cleanup')
def deferred_manual_cleanup(model, limit_size=None, limit_days=None, limit_duplicated=None):
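    """Operator-triggered cleanup for a single model.

    `model` is an (app_label, ModelName) pair such as ('crash', 'Crash');
    each of the three optional limits enables one cleanup pass. A call
    might look like (illustrative values only):

        deferred_manual_cleanup.delay(['crash', 'Crash'], limit_days=30)

    The combined result is reported to Sentry and the 'limitation' logger
    under a single log_id.
    """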
    logger = logging.getLogger('limitation')
    full_result = dict(count=0, size=0, elements=[])
    if limit_duplicated:
        result = delete_duplicate_crashes(limit=limit_duplicated)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_days:
        result = delete_older_than(*model, limit=limit_days)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    if limit_size:
        result = delete_size_is_exceeded(*model, limit=limit_size)
        if result.get('count', 0):
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']

    log_id = str(uuid.uuid4())
    params = dict(log_id=log_id)
    splunk_url = get_splunk_url(params)
    splunk_filter = 'log_id=%s' % log_id if splunk_url else None
    ids_list = sorted([element['id'] for element in full_result['elements']])
    raven_extra = {"id": log_id, "splunk_url": splunk_url, "splunk_filter": splunk_filter,
                   "%s_list" % model[1]: ids_list}
    raven.captureMessage("[Limitation]Manual cleanup freed %d %s, total size of cleaned space is %s [%s]" %
                         (full_result['count'], model[1], filters.filesizeformat(full_result['size']).replace(u'\xa0', u' '), log_id),
                         data=dict(level=20, logger='limitation'), extra=raven_extra)

    extra = dict(log_id=log_id, meta=True, count=full_result['count'],
                 size=filters.filesizeformat(full_result['size']).replace(u'\xa0', u' '),
                 model=model[1], limit_duplicated=limit_duplicated, limit_size=limit_size,
                 limit_days=limit_days, reason='manual')
    logger.info(add_extra_to_log_message('Manual cleanup', extra=extra))
    for element in full_result['elements']:
        element.update({"log_id": log_id, "%s_id" % model[1]: element.pop('id')})
        logger.info(add_extra_to_log_message('Manual cleanup element', extra=element))


@app.task(name='tasks.auto_monitoring_size', ignore_result=True)
def auto_monitoring_size():
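    """Periodic task: refresh storage-usage metrics via `monitoring_size()`."""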
    monitoring_size()


def get_prefix(model_name):
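    """Return the storage path prefixes under which a model keeps its files."""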
    model_path_prefix = {
        Crash: ('minidump', 'minidump_archive'),
        Feedback: ('blackbox', 'system_logs', 'feedback_attach', 'screenshot'),
        Symbols: ('symbols',),
        Version: ('build',),
        SparkleVersion: ('sparkle',),
    }
    return model_path_prefix[model_name]


@app.task(name='tasks.auto_delete_dangling_files', ignore_result=True)
def auto_delete_dangling_files():
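    """Periodic task: reconcile stored files with the database.

    For each file-bearing model, `handle_dangling_files` compares the file
    fields recorded in the database against the objects found under the
    model's storage prefixes; depending on `result['mark']` it reports
    records whose files are missing ('db') or deletes files from S3 that
    no record references ('s3').
    """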
    logger = logging.getLogger('limitation')
    model_kwargs_list = [
        {'model': Crash, 'file_fields': ('upload_file_minidump', 'archive')},
        {'model': Feedback, 'file_fields': ('blackbox', 'system_logs', 'attached_file', 'screenshot')},
        {'model': Symbols, 'file_fields': ('file',)},
        {'model': Version, 'file_fields': ('file',)},
        {'model': SparkleVersion, 'file_fields': ('file',)},
    ]
    for model_kwargs in model_kwargs_list:
        result = handle_dangling_files(
            prefix=get_prefix(model_kwargs['model']),
            **model_kwargs
        )
        if result['mark'] == 'db':
            logger.info('Dangling files detected in db [%d], files path: %s' % (result['count'], result['data']))
            raven.captureMessage(
                "[Limitation]Dangling files detected in db, total: %d" % result['count'],
                data=dict(level=20, logger='limitation')
            )
        elif result['mark'] == 's3':
            logger.info('Dangling files deleted from s3 [%d], files path: %s' % (result['count'], result['data']))
            raven.captureMessage(
                "[Limitation]Dangling files deleted from s3, cleaned up %d files" % result['count'],
                data=dict(level=20, logger='limitation')
            )
        else:
            logger.info('Dangling files not detected')