Completed: Push to main (ac2663...40d33d) by Switcheolytics, 18s, queued 11s

NetworkCrawlerClient.__init__()  (grade D)

Complexity
    Conditions: 13

Size
    Total Lines: 77
    Code Lines: 56

Duplication
    Lines: 0
    Ratio: 0 %

Importance
    Changes: 0

Metric    Value
cc        13
eloc      56
nop       6
dl        0
loc       77
rs        4.2
c         0
b         0
f         0

How to fix

Long Method

Small methods make your code easier to understand, especially when combined with a good name. And if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include Extract Method, as sketched below.
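For example, the URL-scheme selection near the top of __init__() in the code below could become a named helper. This is only a minimal sketch of the Extract Method idea; the helper name _select_url_schemes() is hypothetical, not part of the project::

    class NetworkCrawlerClient(object):

        def __init__(self, is_secure: bool = False):
            self.is_secure = is_secure
            # The inline if/else that picked 'http(s)'/'ws(s)' now has a name.
            self.http_string, self.ws_string = self._select_url_schemes()

        def _select_url_schemes(self):
            """Return the (HTTP, websocket) URL schemes for the configured security mode."""
            if self.is_secure:
                return 'https', 'wss'
            return 'http', 'ws'

The body of the helper is exactly the commented or self-explanatory fragment that was extracted; the surrounding method shrinks and its remaining steps become easier to name.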

Complexity

Complex methods like tradehub.decentralized_client.NetworkCrawlerClient.__init__() often do a lot of different things. To break such a method down, we need to identify a cohesive component within its class. A common approach to finding such a component is to look for fields/methods that share the same prefixes, or suffixes.

Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
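As a rough illustration only (the PeerDiscovery class and the discovery attribute below are hypothetical, not the project's actual design), the peer-tracking fields initialised in __init__() could move together via Extract Class::

    # Hypothetical sketch of Extract Class; names are illustrative only.
    class PeerDiscovery(object):
        """Owns the peer-tracking state currently initialised in __init__()."""

        def __init__(self, seed_peers: list):
            self.all_peers_list = list(seed_peers)
            self.active_validator_list = []
            self.active_sentry_api_list = []


    class NetworkCrawlerClient(object):
        def __init__(self, network: str = "testnet"):
            seed_peers = {
                "mainnet": ["54.255.5.46", "175.41.151.35"],
                "testnet": ["54.255.42.175", "52.220.152.108"],
            }
            # The fields sharing the *_list suffix move together into the new class.
            self.discovery = PeerDiscovery(seed_peers[network.lower()])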

"""
Description:

    Decentralized Client Class for Crawling the Tradehub network.
    This client is the basis for all classes because it allows network calls to fail over to other nodes.
    This class is designed to find available public nodes to interact with for API and Websocket calls.

Usage::

    from tradehub.decentralized_client import NetworkCrawlerClient
"""

import multiprocessing as mp
from tradehub.utils import Request
import random
from requests.exceptions import ConnectionError, HTTPError, Timeout
import socket
# import threading


class NetworkCrawlerClient(object):
    """
    This class crawls the Switcheo Validator network to build a list of accessible endpoints for APIs and Websockets.
    Instantiation of this class is as follows::

        NetworkCrawlerClient(network='mainnet',
                             trusted_ip_list=None,
                             trusted_uri_list=None,
                             is_secure=False,
                             is_websocket_client=True)
    """

    def __init__(self,
                 network: str = "testnet",
                 trusted_ip_list: list = None,
                 trusted_uri_list: list = None,
                 is_secure: bool = False,
                 is_websocket_client: bool = False):
        """
        :param network: The network you want to interact with. Accepts "testnet" or "mainnet".
        :type network: str
        :param trusted_ip_list: Known and trusted IPs to connect to for your API requests.
        :type trusted_ip_list: list
        :param trusted_uri_list: Known and trusted URIs to connect to for your API requests.
        :type trusted_uri_list: list
        :param is_secure: Flag for setting secure connection on or off.
        :type is_secure: bool
        :param is_websocket_client: Flag for finding and setting websocket variables.
        :type is_websocket_client: bool
        """
        if network.lower() not in ["main", "mainnet", "test", "testnet"]:
            raise ValueError("Parameter network - {} - is not valid, requires main, mainnet, test, or testnet.".format(network))

        if trusted_ip_list and trusted_uri_list:
            raise ValueError("Can't use both IP and URI list, only pass one option.")

        if trusted_ip_list or trusted_uri_list:
            self.BYPASS_NETWORK_CRAWLER = True
        else:
            self.BYPASS_NETWORK_CRAWLER = False

        self.is_secure = is_secure
        if self.is_secure:
            self.http_string = 'https'
            self.ws_string = 'wss'
        else:
            self.http_string = 'http'
            self.ws_string = 'ws'
        self.is_websocket_client = is_websocket_client
        self.active_ws_uri_list = []

        if not self.BYPASS_NETWORK_CRAWLER:
            self.seed_peers_list = {
                "main": ["54.255.5.46", "175.41.151.35"],
                "mainnet": ["54.255.5.46", "175.41.151.35"],
                "test": ["54.255.42.175", "52.220.152.108"],
                "testnet": ["54.255.42.175", "52.220.152.108"],
            }
            self.tradescan_node_url = {
                "main": "https://switcheo.org/nodes?net=main",
                "mainnet": "https://switcheo.org/nodes?net=main",
                "test": "https://switcheo.org/nodes?net=test",
                "testnet": "https://switcheo.org/nodes?net=test",
            }

            self.all_peers_list = self.seed_peers_list[network.lower()]
            self.active_validator_list = []
            self.active_sentry_api_list = []
            self.validator_crawler_mp()
            self.sentry_status_request(uri=False)
        elif trusted_ip_list:
            self.all_peers_list = trusted_ip_list
            self.active_validator_list = trusted_ip_list
            self.active_sentry_api_list = []
            self.sentry_status_request(uri=False)
            if not self.active_sentry_api_list:
                self.active_sentry_api_list = trusted_ip_list
        elif trusted_uri_list:
            self.all_peers_list = trusted_uri_list
            self.active_validator_list = trusted_uri_list
            self.active_sentry_api_list = []
            self.sentry_status_request(uri=True)
            if not self.active_sentry_api_list:
                self.active_sentry_api_list = trusted_uri_list
        self.active_sentry_uri = self.active_sentry_api_list[random.randint(a=0, b=len(self.active_sentry_api_list)-1)]
        self.active_sentry_api_ip = self.active_sentry_uri.split(':')[1][2:]
        if self.is_websocket_client:
            self.active_ws_uri = self.active_ws_uri_list[random.randint(a=0, b=len(self.active_ws_uri_list)-1)]
            self.active_ws_ip = self.active_ws_uri.split(':')[1][2:]

    def validator_crawler_mp(self):
        """
        Crawl the Tradehub Validators to test for available API endpoints.

        Execution of this function is as follows::

            validator_crawler_mp()
        """
        checked_peers_list = []
        unchecked_peers_list = list(set(self.all_peers_list) - set(checked_peers_list))

        while unchecked_peers_list:

            pool = mp.Pool(processes=10)
            validator_outputs = pool.map(self.validator_status_request, unchecked_peers_list)
            pool.close()
            pool.join()

            for validator in validator_outputs:
                self.all_peers_list.append(validator["ip"])
                checked_peers_list.append(validator["ip"])
                if validator["validator_status"] == "Active" and not validator["catching_up"]:
                    self.active_validator_list.append(validator["ip"])
                for connected_node in validator["connected_nodes"]:
                    self.all_peers_list.append(connected_node["node_ip"])

            self.all_peers_list = list(dict.fromkeys(self.all_peers_list))
            checked_peers_list = list(dict.fromkeys(checked_peers_list))
            self.active_validator_list = list(dict.fromkeys(self.active_validator_list))
            unchecked_peers_list = list(set(self.all_peers_list) - set(checked_peers_list))

            # If the initial peers do not return any results, query the Tradescan API.
            # if not self.active_peers_list:
            #     validators = Request(api_url=self.tradescan_node_url, timeout=30).get()
            #     for validator in validators:
            #         unchecked_peers_list.append(validator["ip"])

    def validator_status_request(self, validator_ip: str):
        """
        Function that makes the network requests to the Tradehub validators across the network.

        Execution of this function is as follows::

            validator_status_request(validator_ip='54.255.5.46')

        The expected return result for this function is as follows::

            {
                'moniker': 'spock',
                'id': 'ca1189045e84d2be5db0a1ed326ce7cd56015f11',
                'ip': '54.255.5.46',
                'version': '0.33.7',
                'network': 'switcheo-tradehub-1',
                'latest_block_hash': 'DF194D43058D3CD89DD98A7DA28164B239B9693C822A1DB16CCC27FB49CA587B',
                'latest_block_height': '7995864',
                'latest_block_time': '2021-02-27T19:51:00.162091183Z',
                'earliest_block_height': '1',
                'earliest_block_time': '2020-08-14T07:32:27.856700491Z',
                'catching_up': False,
                'validator_address': '7091A72888509B3F3069231081621DC988D63542',
                'validator_pub_key_type': 'tendermint/PubKeyEd25519',
                'validator_pub_key': 'epMp0h65WflL7r8tPGQwusVMbCHgy7ucRg8eDlEJPW0=',
                'validator_voting_power': '0',
                'validator_status': 'Active',
                'connected_nodes': [
                    {
                        'node_id': 'd57a64f41487b5e421e91b71dceb0784cae57733',
                        'node_ip': '195.201.82.228',
                        'node_full': 'd57a64f41487b5e421e91b71dceb0784cae57733@195.201.82.228'
                    },
                    ...
                ]
            }

        :param validator_ip: String of the IP address to connect to.
        :return: Dictionary of validators that the crawler has found with the status.
        """
        validator_status = {}
        try:
            process_peer = True
            validator_status["ip"] = validator_ip
            i = Request(api_url="{}://{}:26657".format(self.http_string, validator_ip), timeout=1).get(path='/net_info')
        except (ValueError, ConnectionError, HTTPError, Timeout) as e:
            validator_status["validator_status"] = "Unknown - Cannot Connect to Retrieve Validator INFO - {}".format(e)
            validator_status["connected_nodes"] = []
            process_peer = False

        if process_peer:
            connected_nodes = []

            for connected_peer in i["result"]["peers"]:
                connected_nodes.append({
                    "node_id": connected_peer["node_info"]["id"],
                    "node_ip": connected_peer["remote_ip"],
                    "node_full": "{}@{}".format(connected_peer["node_info"]["id"], connected_peer["remote_ip"])
                })

            try:
                s = Request(api_url="{}://{}:26657".format(self.http_string, validator_ip), timeout=1).get(path='/status')
            except (ValueError, ConnectionError, HTTPError, Timeout) as e:
                validator_status["validator_status"] = "Unknown - Cannot Connect to Retrieve Status end point - {}".format(e)
                validator_status["connected_nodes"] = []
                process_peer = False

            if process_peer:
                validator_status = self.parse_validator_status(request_json=s, validator_ip=validator_ip)
                validator_status["validator_status"] = "Active"
                validator_status["connected_nodes"] = connected_nodes

        return validator_status

    def parse_validator_status(self, request_json: dict, validator_ip: str):
        """
        Function to parse each peer's JSON element and build information about each.

        Execution of this function is as follows::

            parse_validator_status(request_json='/status', validator_ip='54.255.5.46')

        The expected return result for this function is as follows::

            {
                'moniker': 'spock',
                'id': 'ca1189045e84d2be5db0a1ed326ce7cd56015f11',
                'ip': '54.255.5.46',
                'version': '0.33.7',
                'network': 'switcheo-tradehub-1',
                'latest_block_hash': 'DF194D43058D3CD89DD98A7DA28164B239B9693C822A1DB16CCC27FB49CA587B',
                'latest_block_height': '7995864',
                'latest_block_time': '2021-02-27T19:51:00.162091183Z',
                'earliest_block_height': '1',
                'earliest_block_time': '2020-08-14T07:32:27.856700491Z',
                'catching_up': False,
                'validator_address': '7091A72888509B3F3069231081621DC988D63542',
                'validator_pub_key_type': 'tendermint/PubKeyEd25519',
                'validator_pub_key': 'epMp0h65WflL7r8tPGQwusVMbCHgy7ucRg8eDlEJPW0=',
                'validator_voting_power': '0'
            }

        :param request_json: Dictionary of the return response from the validator status request.
        :param validator_ip: String of the IP address to connect to.
        :return: Dictionary of validator information.
        """
        return {
            "moniker": request_json["result"]["node_info"]["moniker"],
            "id": request_json["result"]["node_info"]["id"],
            "ip": validator_ip,
            "version": request_json["result"]["node_info"]["version"],
            "network": request_json["result"]["node_info"]["network"],
            "latest_block_hash": request_json["result"]["sync_info"]["latest_block_hash"],
            "latest_block_height": request_json["result"]["sync_info"]["latest_block_height"],
            "latest_block_time": request_json["result"]["sync_info"]["latest_block_time"],
            "earliest_block_height": request_json["result"]["sync_info"]["earliest_block_height"],
            "earliest_block_time": request_json["result"]["sync_info"]["earliest_block_time"],
            "catching_up": request_json["result"]["sync_info"]["catching_up"],
            "validator_address": request_json["result"]["validator_info"]["address"],
            "validator_pub_key_type": request_json["result"]["validator_info"]["pub_key"]["type"],
            "validator_pub_key": request_json["result"]["validator_info"]["pub_key"]["value"],
            "validator_voting_power": request_json["result"]["validator_info"]["voting_power"]
        }

    def sentry_status_request(self, uri: bool = False):
        """
        This function is here to ensure the nodes that have open network connections also have their persistence service running.
        Many times the network connection is open for communication but the persistence service will not be on.

        Execution of this function is as follows::

            sentry_status_request(uri=True)

        :param uri: Bool value for a URI or IP address.
        """
        for active_validator in self.active_validator_list:
            if uri:
                try:
                    # Have to check the "/get_status" endpoint because the port could be open and the validator fully synced but have the persistence service inactive, shutdown, stopped, or non-responsive.
                    Request(api_url=active_validator, timeout=1).get(path='/get_status')
                    self.active_sentry_api_list.append(active_validator)
                    if self.is_websocket_client:
                        self.websocket_status_check(ip=active_validator)
                except (ValueError, ConnectionError, HTTPError, Timeout):
                    pass
            else:
                # 1318 - Cosmos REST; 5001 - Demex REST; 5002 - Reverse Proxy for Demex and Cosmos REST; Recommended to not use proxy
                for port in ["5001"]:
                    try:
                        # Have to check the "/get_status" endpoint because the port could be open and the validator fully synced but have the persistence service inactive, shutdown, stopped, or non-responsive.
                        Request(api_url="{}://{}:{}".format(self.http_string, active_validator, port), timeout=1).get(path='/get_status')
                        self.active_sentry_api_list.append('{}://{}:{}'.format(self.http_string, active_validator, port))
                        if self.is_websocket_client:
                            self.websocket_status_check(ip=active_validator)
                    except (ValueError, ConnectionError, HTTPError, Timeout):
                        pass

        self.active_sentry_api_list = list(dict.fromkeys(self.active_sentry_api_list))
        self.active_ws_uri_list = list(dict.fromkeys(self.active_ws_uri_list))

    def websocket_status_check(self, ip: str, port: int = 5000):
        """
        Function to check if the websocket port is open for communication.
        This is called as part of the sentry check because calling the websocket also requires the persistence service to be available.

        Execution of this function is as follows::

            websocket_status_check(ip='54.255.5.46', port=5000)

        :param ip: String of the IP address to connect to.
        :param port: Int value for the port to be checked.
        """
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            location = (ip, port)
            result_of_check = s.connect_ex(location)
            if result_of_check == 0:
                self.active_ws_uri_list.append('{}://{}:{}/ws'.format(self.ws_string, ip, port))
            s.close()
        except socket.error:
            pass

    # def update_validators_and_sentries(self):
    #     threading.Timer(5.0, self.update_validators_and_sentries).start()
    #     self.validator_crawler_mp()
    #     self.sentry_status_request()
    #     self.active_sentry_api_ip = self.active_sentry_api_list[random.randint(a=0, b=len(self.active_sentry_api_list)-1)]

    def tradehub_get_request(self, path: str, params: dict = None):
        """
        This is a wrapper for the get request function to allow for retrying network calls on different available validators if a request fails.

        Execution of this function is as follows::

            tradehub_get_request(path='/txs')

        :param path: String of the path being used for the network request that is going to be made.
        :param params: Dict values that will be added to the get request.
        :return: Dictionary of the return request based on the network path sent.
        """
        try:
            req = Request(api_url=self.active_sentry_uri, timeout=2).get(path=path, params=params)
            return req
        except (ValueError, ConnectionError, HTTPError, Timeout):
            self.active_sentry_api_list.remove(self.active_sentry_uri)
            if not self.active_sentry_api_list and not self.BYPASS_NETWORK_CRAWLER:
                self.validator_crawler_mp()
                self.sentry_status_request()
            elif not self.active_sentry_api_list and self.BYPASS_NETWORK_CRAWLER:
                raise ValueError("Provided Sentry API IP addresses are not responding.")
            self.active_sentry_uri = self.active_sentry_api_list[random.randint(a=0, b=len(self.active_sentry_api_list)-1)]
            return self.tradehub_get_request(path=path, params=params)

    def tradehub_post_request(self, path: str, data: dict = None, json_data: dict = None, params: dict = None):
        """
        This is a wrapper for the post request function to allow for retrying network calls on different available validators if a request fails.

        Execution of this function is as follows::

            tradehub_post_request(path='/txs')

        :param path: String of the path being used for the network request that is going to be made.
        :param data: Dict values that will be added to the post request.
        :param json_data: Dict values that will be added to the post request.
        :param params: Dict values that will be added to the post request.
        :return: Dictionary of the return request based on the network path sent.
        """
        try:
            req = Request(api_url=self.active_sentry_uri, timeout=2).post(path=path, data=data, json_data=json_data, params=params)
            return req
        except (ValueError, ConnectionError, HTTPError, Timeout):
            self.active_sentry_api_list.remove(self.active_sentry_uri)
            if not self.active_sentry_api_list and not self.BYPASS_NETWORK_CRAWLER:
                self.validator_crawler_mp()
                self.sentry_status_request()
            elif not self.active_sentry_api_list and self.BYPASS_NETWORK_CRAWLER:
                raise ValueError("Provided Sentry API IP addresses are not responding.")
            self.active_sentry_uri = self.active_sentry_api_list[random.randint(a=0, b=len(self.active_sentry_api_list)-1)]
            return self.tradehub_post_request(path=path, data=data, json_data=json_data, params=params)
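For context, a short usage sketch of the class listed above; it assumes the tradehub package is installed and that at least one public mainnet node is reachable::

    from tradehub.decentralized_client import NetworkCrawlerClient

    # Crawl mainnet for responsive sentries, then issue a fail-over GET request.
    client = NetworkCrawlerClient(network='mainnet', is_websocket_client=False)
    print(client.active_sentry_uri)        # e.g. 'http://<ip>:5001'
    transactions = client.tradehub_get_request(path='/txs')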