#!/usr/bin/env python3
"""This module is used for network connections; APIs, downloading, etc."""

import os  # filesystem read
try:
    from defusedxml import ElementTree  # safer XML parsing
except (ImportError, AttributeError):
    from xml.etree import ElementTree  # XML parsing
import re  # regexes
import concurrent.futures  # multiprocessing/threading
import glob  # pem file lookup
import requests  # downloading
from bs4 import BeautifulSoup  # scraping
from bbarchivist import utilities  # parse filesize
from bbarchivist.bbconstants import SERVERS  # lookup servers

__author__ = "Thurask"
__license__ = "WTFPL v2"
__copyright__ = "Copyright 2015-2016 Thurask"


def grab_pem():
    """
    Work with either local cacerts or system cacerts.
    """
    try:
        pemfile = glob.glob(os.path.join(os.getcwd(), "cacert.pem"))[0]
    except IndexError:
        return requests.certs.where()  # no local cacerts
    else:
        return os.path.abspath(pemfile)  # local cacerts


def pem_wrapper(method):
    """
    Decorator to set REQUESTS_CA_BUNDLE.

    :param method: Method to use.
    :type method: function
    """
    def wrapper(*args, **kwargs):
        """
        Set REQUESTS_CA_BUNDLE before doing function.
        """
        os.environ["REQUESTS_CA_BUNDLE"] = grab_pem()
        return method(*args, **kwargs)
    return wrapper
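

# Illustrative sketch (not part of the original module): pem_wrapper is a plain
# decorator, so any new network helper picks up the same local/system cacert
# selection done by grab_pem(). The helper name and URL below are hypothetical.
@pem_wrapper
def _example_status(url="https://example.com/"):
    """Return the HTTP status of a URL, with REQUESTS_CA_BUNDLE already set."""
    return requests.head(url).status_code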


def generic_soup_parser(url, session=None):
    """
    Get a BeautifulSoup HTML parser for some URL.

    :param url: The URL to check.
    :type url: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    req = session.get(url)
    soup = BeautifulSoup(req.content, "html.parser")
    return soup


@pem_wrapper
def get_length(url, session=None):
    """
    Get content-length header from some URL.

    :param url: The URL to check.
    :type url: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    if url is None:
        return 0
    try:
        heads = session.head(url)
        fsize = heads.headers['content-length']
        return int(fsize)
    except requests.ConnectionError:
        return 0


@pem_wrapper
def download(url, output_directory=None, session=None):
    """
    Download file from given URL.

    :param url: URL to download from.
    :type url: str

    :param output_directory: Download folder. Default is local.
    :type output_directory: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    output_directory = os.getcwd() if output_directory is None else output_directory
    lfname = url.split('/')[-1]
    sname = utilities.stripper(lfname)
    fname = os.path.join(output_directory, lfname)
    download_writer(url, fname, lfname, sname, session)
    remove_empty_download(fname)


def remove_empty_download(fname):
    """
    Remove file if it's empty.

    :param fname: File path.
    :type fname: str
    """
    if os.stat(fname).st_size == 0:
        os.remove(fname)


def download_writer(url, fname, lfname, sname, session=None):
    """
    Download file and write to disk.

    :param url: URL to download from.
    :type url: str

    :param fname: File path.
    :type fname: str

    :param lfname: Long filename.
    :type lfname: str

    :param sname: Short name, for printing to screen.
    :type sname: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session  # create on the fly, as the docstring promises
    with open(fname, "wb") as file:
        req = session.get(url, stream=True)
        clength = req.headers['content-length']
        fsize = utilities.fsizer(clength)
        if req.status_code == 200:  # 200 OK
            print("DOWNLOADING {0} [{1}]".format(sname, fsize))
            for chunk in req.iter_content(chunk_size=1024):
                file.write(chunk)
        else:
            print("ERROR: HTTP {0} IN {1}".format(req.status_code, lfname))


def download_bootstrap(urls, outdir=None, workers=5, session=None):
    """
    Run downloaders for each file in given URL iterable.

    :param urls: URLs to download.
    :type urls: list

    :param outdir: Download folder. Default is handled in :func:`download`.
    :type outdir: str

    :param workers: Number of worker threads. Default is 5.
    :type workers: int

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    workers = len(urls) if len(urls) < workers else workers
    spinman = utilities.SpinManager()
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as xec:
        try:
            spinman.start()
            for url in urls:
                xec.submit(download, url, outdir, session)
        except (KeyboardInterrupt, SystemExit):
            xec.shutdown()
            spinman.stop()
    spinman.stop()
    utilities.spinner_clear()
    utilities.line_begin()
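

# Illustrative sketch (not part of the original module): download_bootstrap()
# simply fans download() out over a thread pool, so a caller only needs a list
# of URLs and an optional output directory. The URLs below are hypothetical.
def _example_bulk_download(outdir="dl"):
    """Fetch a couple of files concurrently into ./dl, reusing one session."""
    urls = [
        "http://example.com/files/one.bar",
        "http://example.com/files/two.bar",
    ]
    os.makedirs(outdir, exist_ok=True)
    download_bootstrap(urls, outdir=outdir, workers=2, session=requests.Session())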


@pem_wrapper
def availability(url, session=None):
    """
    Check HTTP status code of given URL.
    200 or 301-308 is OK, else is not.

    :param url: URL to check.
    :type url: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    try:
        avlty = session.head(url)
        status = int(avlty.status_code)
        return status == 200 or 300 < status <= 308
    except requests.ConnectionError:
        return False


def clean_availability(results, server):
    """
    Clean availability for autolookup script.

    :param results: Result dict.
    :type results: dict(str: str)

    :param server: Server, key for result dict.
    :type server: str
    """
    marker = "PD" if server == "p" else server.upper()
    rel = results[server.lower()]
    avail = marker if rel != "SR not in system" and rel is not None else " "
    return rel, avail


@pem_wrapper
def carrier_checker(mcc, mnc, session=None):
    """
    Query BlackBerry World to map a MCC and a MNC to a country and carrier.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    url = "http://appworld.blackberry.com/ClientAPI/checkcarrier?homemcc={0}&homemnc={1}&devicevendorid=-1&pin=0".format(
        mcc, mnc)
    user_agent = {'User-agent': 'AppWorld/5.1.0.60'}
    req = session.get(url, headers=user_agent)
    root = ElementTree.fromstring(req.text)
    country = carrier = None  # fallbacks, in case the response omits either tag
    for child in root:
        if child.tag == "country":
            country = child.get("name")
        if child.tag == "carrier":
            carrier = child.get("name")
    return country, carrier


def return_npc(mcc, mnc):
    """
    Format MCC and MNC into a NPC.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int
    """
    return "{0}{1}30".format(str(mcc).zfill(3), str(mnc).zfill(3))
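
# Worked example (hypothetical MCC/MNC values), showing the zero-padding:
#
#     >>> return_npc(302, 220)
#     '30222030'
#     >>> return_npc(46, 1)
#     '04600130'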


@pem_wrapper
def carrier_query(npc, device, upgrade=False, blitz=False, forced=None, session=None):
    """
    Query BlackBerry servers, check which update is out for a carrier.

    :param npc: MCC + MNC (see :func:`return_npc`)
    :type npc: int

    :param device: Hexadecimal hardware ID.
    :type device: str

    :param upgrade: Whether to use upgrade files. False by default.
    :type upgrade: bool

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool

    :param forced: Force a software release.
    :type forced: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    upg = "upgrade" if upgrade else "repair"
    forced = "latest" if forced is None else forced
    url = "https://cs.sl.blackberry.com/cse/updateDetails/2.2/"
    query = '<?xml version="1.0" encoding="UTF-8"?>'
    query += '<updateDetailRequest version="2.2.1" authEchoTS="1366644680359">'
    query += "<clientProperties>"
    query += "<hardware>"
    query += "<pin>0x2FFFFFB3</pin><bsn>1128121361</bsn>"
    query += "<imei>004401139269240</imei>"
    query += "<id>0x{0}</id>".format(device)
    query += "</hardware>"
    query += "<network>"
    query += "<homeNPC>0x{0}</homeNPC>".format(npc)
    query += "<iccid>89014104255505565333</iccid>"
    query += "</network>"
    query += "<software>"
    query += "<currentLocale>en_US</currentLocale>"
    query += "<legalLocale>en_US</legalLocale>"
    query += "</software>"
    query += "</clientProperties>"
    query += "<updateDirectives>"
    query += '<allowPatching type="REDBEND">true</allowPatching>'
    query += "<upgradeMode>{0}</upgradeMode>".format(upg)
    query += "<provideDescriptions>false</provideDescriptions>"
    query += "<provideFiles>true</provideFiles>"
    query += "<queryType>NOTIFICATION_CHECK</queryType>"
    query += "</updateDirectives>"
    query += "<pollType>manual</pollType>"
    query += "<resultPackageSetCriteria>"
    query += '<softwareRelease softwareReleaseVersion="{0}" />'.format(forced)
    query += "<releaseIndependent>"
    query += '<packageType operation="include">application</packageType>'
    query += "</releaseIndependent>"
    query += "</resultPackageSetCriteria>"
    query += "</updateDetailRequest>"
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    req = session.post(url, headers=header, data=query)
    return parse_carrier_xml(req.text, blitz)
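

# Illustrative sketch (not part of the original module): how return_npc() and
# carrier_query() chain together for a carrier check. The MCC/MNC pair and the
# hardware ID below are hypothetical placeholders, not known-good values.
def _example_carrier_lookup():
    """Ask the update servers what a (hypothetical) carrier is shipping."""
    npc = return_npc(302, 220)
    swver, osver, radver, files = carrier_query(npc, "8D00270A")
    print("SW {0} / OS {1} / Radio {2}, {3} bar files".format(
        swver, osver, radver, len(files)))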


def carrier_swver_get(root):
    """
    Get software release from carrier XML.

    :param root: ElementTree we're barking up.
    :type root: xml.etree.ElementTree.ElementTree
    """
    for child in root.iter("softwareReleaseMetadata"):
        swver = child.get("softwareReleaseVersion")
    return swver


def carrier_child_fileappend(child, files, baseurl, blitz=False):
    """
    Append bar file links to a list from a child element.

    :param child: Child element in use.
    :type child: xml.etree.ElementTree.Element

    :param files: Filelist.
    :type files: list(str)

    :param baseurl: Base URL, URL minus the filename.
    :type baseurl: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    if not blitz:
        files.append(baseurl + child.get("path"))
    else:
        if child.get("type") not in ["system:radio", "system:desktop", "system:os"]:
            files.append(baseurl + child.get("path"))
    return files


def carrier_child_finder(root, files, baseurl, blitz=False):
    """
    Extract filenames, radio and OS from child elements.

    :param root: ElementTree we're barking up.
    :type root: xml.etree.ElementTree.ElementTree

    :param files: Filelist.
    :type files: list(str)

    :param baseurl: Base URL, URL minus the filename.
    :type baseurl: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    osver = radver = ""
    for child in root.iter("package"):
        files = carrier_child_fileappend(child, files, baseurl, blitz)
        if child.get("type") == "system:radio":
            radver = child.get("version")
        elif child.get("type") == "system:desktop":
            osver = child.get("version")
        elif child.get("type") == "system:os":
            osver = child.get("version")
    return osver, radver, files


def parse_carrier_xml(data, blitz=False):
    """
    Parse the response to a carrier update request and return the juicy bits.

    :param data: The data to parse.
    :type data: xml

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    root = ElementTree.fromstring(data)
    sw_exists = root.find('./data/content/softwareReleaseMetadata')
    swver = "N/A" if sw_exists is None else ""
    if sw_exists is not None:
        swver = carrier_swver_get(root)
    files = []
    package_exists = root.find('./data/content/fileSets/fileSet')
    osver = radver = ""
    if package_exists is not None:
        baseurl = "{0}/".format(package_exists.get("url"))
        osver, radver, files = carrier_child_finder(root, files, baseurl, blitz)
    return (swver, osver, radver, files)


@pem_wrapper
def sr_lookup(osver, server, session=None):
    """
    Software release lookup, with choice of server.
    :data:`bbarchivist.bbconstants.SERVERLIST` for server list.

    :param osver: OS version to lookup, 10.x.y.zzzz.
    :type osver: str

    :param server: Server to use.
    :type server: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    reg = re.compile(r"(\d{1,4}\.)(\d{1,4}\.)(\d{1,4}\.)(\d{1,4})")
    query = '<?xml version="1.0" encoding="UTF-8"?>'
    query += '<srVersionLookupRequest version="2.0.0"'
    query += ' authEchoTS="1366644680359">'
    query += '<clientProperties><hardware>'
    query += '<pin>0x2FFFFFB3</pin><bsn>1140011878</bsn>'
    query += '<imei>004402242176786</imei><id>0x8D00240A</id>'
    query += '<isBootROMSecure>true</isBootROMSecure>'
    query += '</hardware>'
    query += '<network>'
    query += '<vendorId>0x0</vendorId><homeNPC>0x60</homeNPC>'
    query += '<currentNPC>0x60</currentNPC><ecid>0x1</ecid>'
    query += '</network>'
    query += '<software><currentLocale>en_US</currentLocale>'
    query += '<legalLocale>en_US</legalLocale>'
    query += '<osVersion>{0}</osVersion>'.format(osver)
    query += '<omadmEnabled>false</omadmEnabled>'
    query += '</software></clientProperties>'
    query += '</srVersionLookupRequest>'
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    try:
        req = session.post(server, headers=header, data=query, timeout=1)
    except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
        return "SR not in system"
    try:
        root = ElementTree.fromstring(req.text)
    except ElementTree.ParseError:
        return "SR not in system"
    else:
        packages = root.findall('./data/content/')
        for package in packages:
            if package.text is not None:
                match = reg.match(package.text)
                if match:
                    return package.text
                else:
                    return "SR not in system"


def sr_lookup_bootstrap(osv, session=None, noalpha2=False):
    """
    Run lookups for each server for given OS.

    :param osv: OS to check.
    :type osv: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()

    :param noalpha2: Whether to skip Alpha2 server. Default is false.
    :type noalpha2: bool
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as xec:
        try:
            results = {
                "p": None,
                "a1": None,
                "a2": None,
                "b1": None,
                "b2": None
            }
            if noalpha2:
                del results["a2"]
            for key in results:
                results[key] = xec.submit(sr_lookup, osv, SERVERS[key], session).result()
            return results
        except KeyboardInterrupt:
            xec.shutdown(wait=False)
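

# Illustrative sketch (not part of the original module): the autolookup flow,
# feeding sr_lookup_bootstrap() results into clean_availability() per server
# key. The OS version string is a hypothetical placeholder.
def _example_autolookup(osv="10.3.3.2049"):
    """Print per-server software releases and their availability markers."""
    results = sr_lookup_bootstrap(osv)
    for server in results:
        rel, avail = clean_availability(results, server)
        print("{0}: {1} [{2}]".format(server.upper(), rel, avail))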


@pem_wrapper
def available_bundle_lookup(mcc, mnc, device, session=None):
    """
    Check which software releases were ever released for a carrier.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int

    :param device: Hexadecimal hardware ID.
    :type device: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    server = "https://cs.sl.blackberry.com/cse/availableBundles/1.0.0/"
    npc = return_npc(mcc, mnc)
    query = '<?xml version="1.0" encoding="UTF-8"?>'
    query += '<availableBundlesRequest version="1.0.0" '
    query += 'authEchoTS="1366644680359">'
    query += '<deviceId><pin>0x2FFFFFB3</pin></deviceId>'
    query += '<clientProperties><hardware><id>0x{0}</id>'.format(device)
    query += '<isBootROMSecure>true</isBootROMSecure></hardware>'
    query += '<network><vendorId>0x0</vendorId><homeNPC>0x{0}</homeNPC>'.format(npc)
    query += '<currentNPC>0x{0}</currentNPC></network><software>'.format(npc)
    query += '<currentLocale>en_US</currentLocale>'
    query += '<legalLocale>en_US</legalLocale>'
    query += '<osVersion>10.0.0.0</osVersion>'
    query += '<radioVersion>10.0.0.0</radioVersion></software>'
    query += '</clientProperties><updateDirectives><bundleVersionFilter>'
    query += '</bundleVersionFilter></updateDirectives>'
    query += '</availableBundlesRequest>'
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    req = session.post(server, headers=header, data=query)
    root = ElementTree.fromstring(req.text)
    package = root.find('./data/content')
    bundlelist = [child.attrib["version"] for child in package]
    return bundlelist


@pem_wrapper
def ptcrb_scraper(ptcrbid):
    """
    Get the PTCRB results for a given device.

    :param ptcrbid: Numerical ID from PTCRB (end of URL).
    :type ptcrbid: str
    """
    baseurl = "https://ptcrb.com/vendor/complete/view_complete_request_guest.cfm?modelid={0}".format(
        ptcrbid)
    soup = generic_soup_parser(baseurl)
    text = soup.get_text()
    text = text.replace("\r\n", " ")
    prelimlist = re.findall("OS .+[^\\n]", text, re.IGNORECASE)
    if not prelimlist:  # Priv
        prelimlist = re.findall(r"[A-Z]{3}[0-9]{3}[\s]", text)
    cleanlist = []
    for item in prelimlist:
        if not item.endswith("\r\n"):  # they should hire QC people...
            cleanlist.append(ptcrb_item_cleaner(item))
    return cleanlist


def space_pad(instring, minlength):
    """
    Pad a string with spaces until it's the minimum length.

    :param instring: String to pad.
    :type instring: str

    :param minlength: Pad while len(instring) < minlength.
    :type minlength: int
    """
    while len(instring) < minlength:
        instring += " "
    return instring


def ptcrb_item_cleaner(item):
    """
    Cleanup poorly formatted PTCRB entries written by an intern.

    :param item: The item to clean.
    :type item: str
    """
    item = item.replace("<td>", "")
    item = item.replace("</td>", "")
    item = item.replace("\n", "")
    item = item.replace(" (SR", ", SR")
    item = re.sub(r"\s?\((.*)$", "", item)
    item = re.sub(r"\sSV.*$", "", item)
    item = item.replace(")", "")
    item = item.replace(". ", ".")
    item = item.replace(";", "")
    item = item.replace("version", "Version")
    item = item.replace("Verison", "Version")
    if item.count("OS") > 1:
        templist = item.split("OS")
        templist[0] = "OS"
        item = "".join([templist[0], templist[1]])
    item = item.replace("SR", "SW Release")
    item = item.replace(" Version:", ":")
    item = item.replace("Version ", " ")
    item = item.replace(":1", ": 1")
    item = item.replace(", ", " ")
    item = item.replace("Software", "SW")
    item = item.replace("  ", " ")
    item = item.replace("OS ", "OS: ")
    item = item.replace("Radio ", "Radio: ")
    item = item.replace("Release ", "Release: ")
    spaclist = item.split(" ")
    if len(spaclist) > 1:
        spaclist[1] = space_pad(spaclist[1], 11)
        spaclist[3] = space_pad(spaclist[3], 11)
    else:
        spaclist.insert(0, "OS:")
    item = " ".join(spaclist)
    item = item.strip()
    return item


@pem_wrapper
def kernel_scraper(utils=False):
    """
    Scrape BlackBerry's GitHub kernel repo for available branches.

    :param utils: Check android-utils repo instead of android-linux-kernel. Default is False.
    :type utils: bool
    """
    repo = "android-utils" if utils else "android-linux-kernel"
    kernlist = []
    sess = requests.Session()
    for page in range(1, 10):
        url = "https://github.com/blackberry/{0}/branches/all?page={1}".format(repo, page)
        soup = generic_soup_parser(url, sess)
        if soup.find("div", {"class": "no-results-message"}):
            break
        else:
            text = soup.get_text()
            kernlist.extend(re.findall(r"msm[0-9]{4}\/[A-Z0-9]{6}", text, re.IGNORECASE))
    return kernlist


def root_generator(folder, build, variant="common"):
    """
    Generate roots for the SHAxxx hash lookup URLs.

    :param folder: Dictionary of variant: loader name pairs.
    :type folder: dict(str: str)

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param variant: Autoloader variant. Default is "common".
    :type variant: str
    """
    # Priv specific
    privx = "bbfoundation/hashfiles_priv/{0}".format(folder[variant])
    # DTEK50 specific
    dtek50x = "bbSupport/DTEK50" if build[:3] == "AAF" else "bbfoundation/hashfiles_priv/dtek50"
    # DTEK60 specific
    dtek60x = dtek50x  # still uses dtek50 folder, for some reason
    # Pack it up
    roots = {"Priv": privx, "DTEK50": dtek50x, "DTEK60": dtek60x}
    return roots


def make_droid_skeleton(method, build, device, variant="common"):
    """
    Make an Android autoloader/hash URL.

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param device: Device to check.
    :type device: str

    :param variant: Autoloader variant. Default is "common".
    :type variant: str
    """
    folder = {"vzw-vzw": "verizon", "na-att": "att", "na-tmo": "tmo", "common": "default"}
    devices = {"Priv": "qc8992", "DTEK50": "qc8952_64_sfi", "DTEK60": "qc8996"}
    roots = root_generator(folder, build, variant)
    base = "bbry_{2}_autoloader_user-{0}-{1}".format(variant, build.upper(), devices[device])
    if method is None:
        skel = "https://bbapps.download.blackberry.com/Priv/{0}.zip".format(base)
    else:
        skel = "http://ca.blackberry.com/content/dam/{1}/{0}.{2}sum".format(base, roots[device], method.lower())
    return skel
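
# Worked example (hypothetical build string "AAJ086"): the plain autoloader
# link versus its SHA256 hashfile, as assembled by the format strings above.
#
#     >>> make_droid_skeleton(None, "aaj086", "Priv")
#     'https://bbapps.download.blackberry.com/Priv/bbry_qc8992_autoloader_user-common-AAJ086.zip'
#     >>> make_droid_skeleton("sha256", "aaj086", "Priv")
#     'http://ca.blackberry.com/content/dam/bbfoundation/hashfiles_priv/default/bbry_qc8992_autoloader_user-common-AAJ086.sha256sum'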


def bulk_droid_skeletons(devs, build, method=None):
    """
    Prepare list of Android autoloader/hash URLs.

    :param devs: List of devices.
    :type devs: list(str)

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str
    """
    carrier_variants = ("common", "vzw-vzw", "na-tmo", "na-att")  # device variants
    common_variants = ("common", )  # no Americans
    carrier_devices = ("Priv", )  # may this list never expand in the future
    skels = []
    for dev in devs:
        varlist = carrier_variants if dev in carrier_devices else common_variants
        for var in varlist:
            skel = make_droid_skeleton(method, build, dev, var)
            skels.append(skel)
    return skels


def prepare_droid_list(device):
    """
    Convert single devices to a list, if necessary.

    :param device: Device to check.
    :type device: str
    """
    if isinstance(device, list):
        devs = device
    else:
        devs = [device]
    return devs


def droid_scanner(build, device, method=None, session=None):
    """
    Check for Android autoloaders on BlackBerry's site.

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param device: Device to check.
    :type device: str

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    devs = prepare_droid_list(device)
    skels = bulk_droid_skeletons(devs, build, method)
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(skels)) as xec:
        results = []
        for skel in skels:
            avail = xec.submit(availability, skel, session)
            if avail.result():
                results.append(skel)
    return results if results else None


def chunker(iterable, inc):
    """
    Convert an iterable into a list of inc sized lists.

    :param iterable: Iterable to chunk.
    :type iterable: list

    :param inc: Increment; how big each chunk is.
    :type inc: int
    """
    chunks = [iterable[x:x + inc] for x in range(0, len(iterable), inc)]
    return chunks
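
# Worked example: the final chunk is allowed to be short, since slicing past
# the end of a list just truncates.
#
#     >>> chunker([1, 2, 3, 4, 5, 6, 7], 3)
#     [[1, 2, 3], [4, 5, 6], [7]]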


def unicode_filter(intext):
    """
    Remove Unicode dashes and surrounding whitespace from scraped text.

    :param intext: Text to filter.
    :type intext: str
    """
    return intext.replace("\u2013", "").strip()


def table_header_filter(ptag):
    """
    Validate p tag, to see if it's relevant.

    :param ptag: P tag.
    :type ptag: bs4.element.Tag
    """
    valid = ptag.find("b") and "BlackBerry" in ptag.text and "experts" not in ptag.text
    return valid


def table_headers(pees):
    """
    Generate table headers from list of p tags.

    :param pees: List of p tags.
    :type pees: list(bs4.element.Tag)
    """
    bolds = [x.text for x in pees if table_header_filter(x)]
    return bolds


@pem_wrapper
def loader_page_scraper():
    """
    Print the scraped autoloader page.
    """
    url = "http://ca.blackberry.com/content/blackberry-com/en_ca/support/smartphones/Android-OS-Reload.html"
    sess = requests.Session()
    soup = generic_soup_parser(url, sess)
    tables = soup.find_all("table")
    headers = table_headers(soup.find_all("p"))
    for idx, table in enumerate(tables):
        print("~~~{0}~~~".format(headers[idx]))
        chunks = chunker(table.find_all("td"), 4)
        for chunk in chunks:
            key = unicode_filter(chunk[0].text)
            ver = unicode_filter(chunk[1].text)
            link = unicode_filter(chunk[2].find("a")["href"])
            print("{0}\n {1}: {2}".format(key, ver, link))
        print(" ")


@pem_wrapper
def base_metadata(url, session=None):
    """
    Get BBNDK metadata, base function.

    :param url: URL to check.
    :type url: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    req = session.get(url)
    data = req.content
    entries = data.split(b"\n")
    metadata = [entry.split(b",")[1].decode("utf-8") for entry in entries if entry]
    return metadata


def ndk_metadata(session=None):
    """
    Get BBNDK target metadata.

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    data = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/metadata", session)
    metadata = [entry for entry in data if entry.startswith(("10.0", "10.1", "10.2"))]
    return metadata


def sim_metadata(session=None):
    """
    Get BBNDK simulator metadata.

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    metadata = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/simulator/simulator_metadata", session)
    return metadata


def runtime_metadata(session=None):
    """
    Get BBNDK runtime metadata.

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    metadata = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/runtime/runtime_metadata", session)
    return metadata


def series_generator(osversion):
    """
    Generate series/branch name from OS version.

    :param osversion: OS version.
    :type osversion: str
    """
    splits = osversion.split(".")
    return "BB{0}_{1}_{2}".format(*splits[0:3])
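
# Worked example (hypothetical OS version): only the first three fields feed
# the branch name.
#
#     >>> series_generator("10.3.2.2876")
#     'BB10_3_2'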


@pem_wrapper
def devalpha_urls(osversion, skel, session=None):
    """
    Check individual Dev Alpha autoloader URLs.

    :param osversion: OS version.
    :type osversion: str

    :param skel: Individual skeleton format to try.
    :type skel: str

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    session = requests.Session() if session is None else session
    url = "http://downloads.blackberry.com/upr/developers/downloads/{0}{1}.exe".format(skel, osversion)
    req = session.head(url)
    if req.status_code == 200:
        finals = (url, req.headers["content-length"])
    else:
        finals = ()
    return finals


def devalpha_urls_bootstrap(osversion, skeletons, session=None):
    """
    Get list of valid Dev Alpha autoloader URLs.

    :param osversion: OS version.
    :type osversion: str

    :param skeletons: List of skeleton formats to try.
    :type skeletons: list

    :param session: Requests session object, default is created on the fly.
    :type session: requests.Session()
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as xec:
        try:
            finals = {}
            skels = skeletons
            for idx, skel in enumerate(skeletons):
                if "<SERIES>" in skel:
                    skels[idx] = skel.replace("<SERIES>", series_generator(osversion))
            for skel in skels:
                final = xec.submit(devalpha_urls, osversion, skel, session).result()
                if final:
                    finals[final[0]] = final[1]
            return finals
        except KeyboardInterrupt:
            xec.shutdown(wait=False)
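

# Illustrative sketch (not part of the original module): skeletons may embed a
# "<SERIES>" placeholder, which devalpha_urls_bootstrap() expands through
# series_generator() before probing each URL. The skeleton names and OS version
# below are hypothetical; dev_dupe_cleaner() (defined below) can prune duplicates
# from the returned dict afterwards.
def _example_devalpha_probe(osversion="10.3.2.2876"):
    """Probe a couple of (hypothetical) Dev Alpha autoloader name patterns."""
    skeletons = ["<SERIES>-DevAlphaC-", "<SERIES>-DevAlphaB-"]
    finals = devalpha_urls_bootstrap(osversion, skeletons)
    return finals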


def dev_dupe_dicter(finals):
    """
    Prepare dictionary to clean duplicate autoloaders.

    :param finals: Dict of URL:content-length pairs.
    :type finals: dict(str: str)
    """
    revo = {}
    for key, val in finals.items():
        revo.setdefault(val, set()).add(key)
    return revo


def dev_dupe_remover(finals, dupelist):
    """
    Filter dictionary of autoloader entries.

    :param finals: Dict of URL:content-length pairs.
    :type finals: dict(str: str)

    :param dupelist: List of duplicate URLs.
    :type dupelist: list(str)
    """
    for dupe in dupelist:
        for entry in dupe:
            if "DevAlpha" in entry:
                del finals[entry]
    return finals


def dev_dupe_cleaner(finals):
    """
    Clean duplicate autoloader entries.

    :param finals: Dict of URL:content-length pairs.
    :type finals: dict(str: str)
    """
    revo = dev_dupe_dicter(finals)
    dupelist = [val for key, val in revo.items() if len(val) > 1]
    finals = dev_dupe_remover(finals, dupelist)
    return finals
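
# Worked example (hypothetical URLs): two entries share a content-length, so
# dev_dupe_dicter() groups them; the "DevAlpha" one is then dropped.
#
#     >>> finals = {".../qnx.winchester.exe": "1000",
#     ...           ".../BB10_3_2.DevAlphaC.exe": "1000",
#     ...           ".../qnx.other.exe": "2000"}
#     >>> sorted(dev_dupe_cleaner(finals))
#     ['.../qnx.other.exe', '.../qnx.winchester.exe']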