ProfileCollectdJsonEncoder.__init__()   A

Complexity
    Conditions: 1

Size
    Total Lines: 10

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 1
    Bugs: 0
    Features: 0

Metric    Value
cc        1
dl        0
loc       10
rs        9.4285
c         1
b         0
f         0
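
The report does not name the tool that produced these figures. As a rough, optional cross-check, the cyclomatic complexity of __init__ (a single condition-free path, hence cc 1) can be recomputed with the radon package; this is only a sketch under that assumption, and the filename used below is illustrative.

from radon.complexity import cc_visit

# Illustrative filename; point this at wherever the profiling script lives.
with open("profile_collectd_json_encoder.py") as handle:
    source = handle.read()

# __init__ contains no branches, so its reported complexity should be 1.
for block in cc_visit(source):
    print(block.name, block.complexity)

The analyzed source follows.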
#!/usr/bin/env python
from profilehooks import profile
from kafka_influxdb.encoder import collectd_json_encoder


class ProfileCollectdJsonEncoder:
    """Build a large batch of collectd JSON messages and profile encoding it."""

    def __init__(self, num_messages=10000):
        self.encoder = collectd_json_encoder.Encoder()
        self.sample_messages = b"""
            [{"values":[0.6],"dstypes":["gauge"],"dsnames":["value"],"time":1444745144.824,"interval":10.000,"host":"xx.example.internal","plugin":"cpu","plugin_instance":"1","type":"percent","type_instance":"system"}]
            [{"values":[0.7],"dstypes":["gauge"],"dsnames":["value"],"time":1444745144.824,"interval":10.000,"host":"example.com","plugin":"cpu","plugin_instance":"1","type":"percent","type_instance":"user"}]
            [{"values":[37.7],"dstypes":["gauge"],"dsnames":["value"],"time":1444745144.824,"interval":10.000,"host":"myhost","plugin":"cpu","plugin_instance":"0","type":"percent","type_instance":"nice"}]
            [{"values":[0],"dstypes":["gauge"],"dsnames":["value"],"time":1444745145.824,"interval":10.000,"host":"myhost","plugin":"cpu","plugin_instance":"0","type":"percent","type_instance":"interrupt"}]
            [{"values":[1.1],"dstypes":["gauge"],"dsnames":["value"],"time":1444745136.182,"interval":10.000,"host":"myhost","plugin":"memory","plugin_instance":"","type":"percent","type_instance":"slab_recl"}]
            """
        self.messages = b'\n'.join(num_messages * [self.sample_messages])

    @profile
    def profile_messages(self):
        # profilehooks collects stats for this call and prints a report.
        self.encoder.encode(self.messages)


if __name__ == '__main__':
    profiler = ProfileCollectdJsonEncoder()
    profiler.profile_messages()
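
The @profile decorator above relies on the profilehooks package to report timings for profile_messages(). For a cumulative, per-function breakdown of the underlying encode() call, the standard library's cProfile can be pointed at the same workload. A minimal sketch, assuming the class above is in scope (for example, appended to the same script); the variable name is illustrative.

import cProfile

profiler = ProfileCollectdJsonEncoder()
# Profile the encode() call directly rather than the decorated wrapper,
# sorting the report by cumulative time.
cProfile.runctx("profiler.encoder.encode(profiler.messages)",
                globals(), locals(), sort="cumulative")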