Completed
Pull Request — master (#58)
by
unknown
01:10
created

MockElasticsearchReportBackend   A

Complexity

Total Complexity 1

Size/Duplication

Total Lines 14
Duplicated Lines 0 %

Importance

Changes 1
Bugs 0 Features 0
Metric Value
c 1
b 0
f 0
dl 0
loc 14
rs 10
wmc 1

1 Method

Rating   Name   Duplication   Size   Complexity  
A __init__() 0 13 1
1
import json
2
import logging
3
from io import BytesIO
4
from io import StringIO
5
try:
6
    import unittest.mock as mock
7
except ImportError:
8
    import mock
9
10
import py
11
import pytest
12
from freezegun import freeze_time
13
14
from pytest_benchmark import plugin
15
from pytest_benchmark.plugin import BenchmarkSession
16
from pytest_benchmark.plugin import pytest_benchmark_compare_machine_info
17
from pytest_benchmark.plugin import pytest_benchmark_generate_json
18
from pytest_benchmark.plugin import pytest_benchmark_group_stats
19
from pytest_benchmark.report_backend import ElasticReportBackend
20
from pytest_benchmark.elasticsearch_storage import ElasticsearchStorage
21
22
23
THIS = py.path.local(__file__)
24
BENCHFILE = THIS.dirpath('test_storage/0030_5b78858eb718649a31fb93d8dc96ca2cee41a4cd_20150815_030419_uncommitted-changes.json')
25
SAVE_DATA = json.load(BENCHFILE.open('rU'))
26
SAVE_DATA["machine_info"] = {'foo': 'bar'}
27
SAVE_DATA["commit_info"] = {'foo': 'bar'}
28
29
tmp = SAVE_DATA.copy()
30
31
ES_DATA = tmp.pop("benchmarks")[0]
32
ES_DATA.update(tmp)
33
34
35
class Namespace(object):
    """Minimal stand-in object exposing keyword arguments both as
    attributes and via dict-style ``[]`` lookup."""

    def __init__(self, **kwargs):
        # Stash every keyword directly in the instance dict.
        vars(self).update(kwargs)

    def __getitem__(self, item):
        # Same storage as attribute access; missing keys raise KeyError.
        return vars(self)[item]
41
42
43
class LooseFileLike(BytesIO):
    """A BytesIO whose contents remain readable after close().

    Normally closing a BytesIO discards its buffer; this subclass
    snapshots the buffer first so tests can inspect what was written.
    """

    def close(self):
        snapshot = self.getvalue()
        BytesIO.close(self)
        # Shadow the bound method so later calls return the snapshot.
        self.getvalue = lambda: snapshot
48
49
50
class MockElasticsearchReportBackend(ElasticReportBackend):
    """ElasticReportBackend test double with the storage layer mocked out."""

    def __init__(self, config):
        # Deliberately do NOT call super().__init__: the real backend would
        # open an Elasticsearch connection. Set the attributes by hand.
        self.config = config
        self.logger = logging.getLogger(__name__)
        self.verbose = False
        self.machine_id = "FoobarOS"
        self.performance_regressions = []
        self.benchmarks = []
        self.compare = '0001'
        # No persistence by default; individual tests flip these flags.
        self.save = False
        self.autosave = False
        self.json = False
        # Mock constrained to the real storage API so bad calls fail loudly.
        self.storage = mock.Mock(spec=ElasticsearchStorage)
        self.elasticsearch_hosts = ["localhost:9200"]
        self.elasticsearch_index = "benchmark"
        self.elasticsearch_doctype = "benchmark"
64
65
66
class MockSession(BenchmarkSession):
    """BenchmarkSession test double: skips the real plugin wiring and
    instead populates itself with fixture data from BENCHFILE and a
    MockElasticsearchReportBackend."""

    def __init__(self):
        # Plain attribute setup; the real BenchmarkSession.__init__ is
        # intentionally not called (it needs a live pytest config).
        self.histogram = True
        self.benchmarks = []
        self.performance_regressions = []
        self.sort = u"min"
        self.logger = logging.getLogger(__name__)
        self.machine_id = "FoobarOS"
        self.machine_info = {'foo': 'bar'}
        self.options = {
            'min_rounds': 123,
            'min_time': 234,
            'max_time': 345,
        }
        self.compare_fail = []
        # Fake pytest config exposing only the hooks the session calls;
        # real hook implementations are used where behavior matters,
        # no-op lambdas elsewhere.
        self.config = Namespace(hook=Namespace(
            pytest_benchmark_group_stats=pytest_benchmark_group_stats,
            pytest_benchmark_generate_machine_info=lambda **kwargs: {'foo': 'bar'},
            pytest_benchmark_update_machine_info=lambda **kwargs: None,
            pytest_benchmark_compare_machine_info=pytest_benchmark_compare_machine_info,
            pytest_benchmark_generate_json=pytest_benchmark_generate_json,
            pytest_benchmark_update_json=lambda **kwargs: None,
            pytest_benchmark_generate_commit_info=lambda **kwargs: {'foo': 'bar'},
            pytest_benchmark_update_commit_info=lambda **kwargs: None,
        ))
        self.elasticsearch_host = "localhost:9200"
        self.elasticsearch_index = "benchmark"
        self.elasticsearch_doctype = "benchmark"
        self.report_backend = MockElasticsearchReportBackend(self.config)
        self.group_by = 'group'
        self.columns = ['min', 'max', 'mean', 'stddev', 'median', 'iqr',
                        'outliers', 'rounds', 'iterations']
        self.benchmarks = []
        # Load the canned benchmark run and wrap each record in a Namespace
        # mimicking the plugin's benchmark objects.
        with BENCHFILE.open('rU') as fh:
            data = json.load(fh)
        self.benchmarks.extend(
            Namespace(
                # as_dict mimics the real benchmark's API. _bench=bench binds
                # the loop variable early — without the default argument every
                # lambda would close over the *last* bench.
                as_dict=lambda include_data=False, stats=True, flat=False, _bench=bench:
                    dict(_bench, **_bench["stats"]) if flat else dict(_bench),
                name=bench['name'],
                fullname=bench['fullname'],
                group=bench['group'],
                options=bench['options'],
                has_error=False,
                params=None,
                **bench['stats']
            )
            for bench in data['benchmarks']
        )
115
116
117
# ``unicode`` only exists on Python 2; on Python 3 native text is ``str``.
# ``str is bytes`` is True exactly on Python 2, so the py2-only name is
# never evaluated on Python 3.
if str is bytes:
    text_type = unicode  # noqa: F821 -- Python 2 branch only
else:
    text_type = str
121
122
123
def force_text(text):
    """Return *text* as native text, decoding bytes as UTF-8 if needed."""
    if not isinstance(text, text_type):
        # Bytes (or bytes-like) input: decode to text.
        return text.decode('utf-8')
    return text
128
129
130
def force_bytes(text):
    """Return *text* as bytes, encoding native text as UTF-8 if needed."""
    if not isinstance(text, text_type):
        # Already bytes: pass through untouched.
        return text
    return text.encode('utf-8')
135
136
137 View Code Duplication
def make_logger(sess):
    """Replace the loggers on *sess* and on its report backend with fakes
    that write every message into one shared StringIO.

    Returns the StringIO so tests can assert on the captured output.
    """
    output = StringIO()

    def fake_logger():
        # The session and the backend previously duplicated this Namespace
        # literal verbatim; build it in a single place instead.
        return Namespace(
            warn=lambda code, text, **opts: output.write(u"%s: %s %s\n" % (code, force_text(text), opts)),
            info=lambda text, **opts: output.write(force_text(text) + u'\n'),
            error=lambda text: output.write(force_text(text) + u'\n'),
        )

    sess.logger = fake_logger()
    sess.report_backend.logger = fake_logger()
    return output
150
151
152
@pytest.fixture
def sess():
    """Provide a fresh, fully-populated MockSession for each test."""
    session = MockSession()
    return session
155
156
157
@pytest.fixture
def logger_output(sess):
    """StringIO capturing everything logged via *sess* and its backend."""
    captured = make_logger(sess)
    return captured
160
161
162
@freeze_time("2015-08-15T00:04:18.687119")
def test_handle_saving(sess, logger_output, monkeypatch):
    """handle_saving with autosave on should push the flattened benchmark
    document to the (mocked) Elasticsearch storage under a commit-prefixed id.
    """
    # Pin the plugin version so the generated document is reproducible.
    monkeypatch.setattr(plugin, '__version__', '2.5.0')
    backend = sess.report_backend
    backend.save = "commitId"
    backend.autosave = True
    backend.json = None
    backend.save_data = False
    backend.handle_saving(sess.benchmarks, sess.machine_info)
    backend.storage.save.assert_called_with(ES_DATA, 'commitId_tests/test_normal.py::test_xfast_parametrized[0]')
171