Completed: Push to master (62a151...74c045), by unknown, created 53s ago

BaseS3Test.test_dangling_delete_s3()   A

Complexity     Conditions: 4
Size           Total Lines: 21
Duplication    Lines: 0, Ratio: 0 %
Importance     Changes: 1, Bugs: 0, Features: 0

Metric   Value
cc       4
c        1
b        0
f        0
dl       0
loc      21
rs       9.0534
import os
from django.test import TestCase, override_settings

import moto
import boto
import mock
import omaha
from boto.s3.key import Key

from crash.factories import CrashFactoryWithFiles, SymbolsFactory
from feedback.factories import FeedbackFactory
from omaha.factories import VersionFactory
from sparkle.factories import SparkleVersionFactory

from crash.models import Crash, Symbols
from feedback.models import Feedback
from omaha.models import Version
from omaha.tests import OverloadTestStorageMixin
from sparkle.models import SparkleVersion
from omaha_server.utils import storage_with_spaces_instance
from omaha.limitation import bulk_delete
from storages.backends.s3boto import S3BotoStorage
from omaha.tasks import get_prefix


class BaseS3Test(object):
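    """Shared S3 storage tests.

    Subclasses set model, factory, file_fields and files; each test below
    runs against a moto-mocked S3 bucket named 'test'.
    """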
    model = None
    factory = None
    file_fields = None
    files = None

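    # Deleting an instance should also remove its files from the S3 bucket.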
    @moto.mock_s3
    def test_model_delete(self):
        conn = boto.connect_s3()
        conn.create_bucket('test')
        obj = self.factory()

        keys = conn.get_bucket('test').get_all_keys()
        keys = [key.name for key in keys]
        for field in self.file_fields:
            self.assertIn(getattr(obj, field).name, keys)

        obj.delete()
        keys = conn.get_bucket('test').get_all_keys()
        self.assertFalse(keys)

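    # After replacing every file field and saving, none of the original keys
    # should remain in the bucket.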
    @moto.mock_s3
    def test_model_update(self):
        conn = boto.connect_s3()
        conn.create_bucket('test')
        obj = self.factory()
        new_obj = self.factory()

        old_keys = conn.get_bucket('test').get_all_keys()
        old_keys = [key.name for key in old_keys]

        for field in self.file_fields:
            self.assertIn(getattr(obj, field).name, old_keys)
            setattr(obj, field, getattr(new_obj, field))
            obj.save()

        new_keys = conn.get_bucket('test').get_all_keys()
        self.assertFalse(set(old_keys) & set(new_keys))

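    # bulk_delete() on the whole queryset should leave the bucket empty.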
    @moto.mock_s3
    def test_bulk_delete(self):
        conn = boto.connect_s3()
        conn.create_bucket('test')
        self.factory.create_batch(10)
        qs = self.model.objects.all()
        self.assertEqual(qs.count(), 10)
        keys = conn.get_bucket('test').get_all_keys()
        self.assertEqual(len(keys), len(self.file_fields) * 10)
        with mock.patch('boto.__init__') as my_mock:
            my_mock.connect_s3.return_value = conn
            try:                                     # When a nonexistent key is deleted from S3 in the pre_delete signal,
                bulk_delete(self.model, qs)          # original boto does not raise S3ResponseError: 404 Not Found,
            except boto.exception.S3ResponseError:   # but the mocked boto does.
                pass

        keys = conn.get_bucket('test').get_all_keys()
        self.assertFalse(keys)

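    # A DB record whose S3 key has been removed is dangling on the database
    # side; handle_dangling_files() should ask for notifications to be sent.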
    @moto.mock_s3
    def test_dangling_delete_db(self):
        conn = boto.connect_s3()
        conn.create_bucket('test')
        bucket = conn.get_bucket('test')
        self.factory.create_batch(2)
        keys = [key.name for key in conn.get_bucket('test').get_all_keys()]
        bucket.delete_key(keys[0])
        result = omaha.limitation.handle_dangling_files(
            self.model,
            get_prefix(self.model),
            self.file_fields
        )
        self.assertEqual(result['status'], 'Send notifications')

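    # S3 keys with no matching DB record are dangling on the storage side;
    # handle_dangling_files() should remove them from the bucket.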
    @moto.mock_s3
    def test_dangling_delete_s3(self):
        # create a bucket and upload files straight to S3
        conn = boto.connect_s3()
        conn.create_bucket('test')
        bucket = conn.get_bucket('test')
        prefix = get_prefix(self.model)
        for f in self.files:
            k = Key(bucket, '%s/%s' % (f['prefix'], f['file_path']))
            with open(f['file_path'], 'rb') as test_file:
                k.send_file(test_file)
        # create 2 files in the db
        self.factory.create_batch(2)
        result = omaha.limitation.handle_dangling_files(
            self.model,
            prefix,
            self.file_fields
        )
        for _file in result['data']:
            self.assertFalse(
                _file in [key.name for key in conn.get_bucket('test').get_all_keys()]
            )


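# Concrete test cases: each class binds the shared tests to one model, its
# factory, the model's file fields, and a sample file for the dangling checks.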
@override_settings(DEFAULT_FILE_STORAGE='storages.backends.s3boto.S3BotoStorage')
class CrashS3Test(BaseS3Test, TestCase):
    model = Crash
    factory = CrashFactoryWithFiles
    file_fields = ['archive', 'upload_file_minidump']
    files = ({
        'prefix': 'minidump_archive',
        'file_path': os.path.abspath(
            'crash/tests/testdata/7b05e196-7e23-416b-bd13-99287924e214.dmp'
        )
    },)


@override_settings(DEFAULT_FILE_STORAGE='storages.backends.s3boto.S3BotoStorage')
class FeedbackS3Test(BaseS3Test, TestCase):
    model = Feedback
    factory = FeedbackFactory
    file_fields = ['screenshot', 'blackbox', 'system_logs', 'attached_file']
    files = ({
        'prefix': 'screenshot',
        'file_path': os.path.abspath('feedback/tests/testdata/test_png.png')
    },)


@override_settings(DEFAULT_FILE_STORAGE='storages.backends.s3boto.S3BotoStorage')
class SymbolsS3Test(BaseS3Test, TestCase):
    model = Symbols
    factory = SymbolsFactory
    file_fields = ['file']
    files = ({
        'prefix': 'symbols',
        'file_path': os.path.abspath(
            'crash/tests/testdata/symbols/BreakpadTestApp.pdb/C1C0FA629EAA4B4D9DD2ADE270A231CC1/BreakpadTestApp.sym'
        )
    },)

    def setUp(self):
        storage_with_spaces_instance._setup()


@override_settings(DEFAULT_FILE_STORAGE='storages.backends.s3boto.S3BotoStorage')
class OmahaVersionS3Test(OverloadTestStorageMixin, BaseS3Test, TestCase):
    model = Version
    factory = VersionFactory
    file_fields = ['file']
    files = ({
        'prefix': 'build',
        'file_path': os.path.abspath('feedback/tests/testdata/test_png.png')
    },)


@override_settings(DEFAULT_FILE_STORAGE='storages.backends.s3boto.S3BotoStorage')
class SparkleVersionS3Test(OverloadTestStorageMixin, BaseS3Test, TestCase):
    model = SparkleVersion
    factory = SparkleVersionFactory
    file_fields = ['file']
    files = ({
        'prefix': 'sparkle',
        'file_path': os.path.abspath('feedback/tests/testdata/test_png.png')
    },)