1
|
|
|
#!/usr/bin/env python3 |
2
|
|
|
"""This module is used for network connections; APIs, downloading, etc.""" |
3
|
|
|
|
4
|
|
|
import concurrent.futures  # multiprocessing/threading
import functools  # preserve wrapped-function metadata
import glob  # pem file lookup
import os  # filesystem read
import re  # regexes
import xml.etree.ElementTree  # XML parsing

import requests  # downloading
from bs4 import BeautifulSoup  # scraping

from bbarchivist import utilities  # parse filesize
from bbarchivist.bbconstants import SERVERS  # lookup servers
13
|
|
|
|
14
|
|
|
__author__ = "Thurask" |
15
|
|
|
__license__ = "WTFPL v2" |
16
|
|
|
__copyright__ = "Copyright 2015-2016 Thurask" |
17
|
|
|
|
18
|
|
|
|
19
|
|
|
def grab_pem():
    """
    Work with either local cacerts or system cacerts. Since cx_freeze is dumb.
    """
    localcerts = glob.glob(os.path.join(os.getcwd(), "cacert.pem"))
    if localcerts:
        return os.path.abspath(localcerts[0])  # local cacerts
    return requests.certs.where()  # no local cacerts
29
|
|
|
|
30
|
|
|
|
31
|
|
|
def pem_wrapper(method):
    """
    Decorator to set REQUESTS_CA_BUNDLE.

    :param method: Method to use.
    :type method: function
    """
    @functools.wraps(method)  # keep the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        """
        Set REQUESTS_CA_BUNDLE before doing function.
        """
        os.environ["REQUESTS_CA_BUNDLE"] = grab_pem()
        return method(*args, **kwargs)
    return wrapper
45
|
|
|
|
46
|
|
|
|
47
|
|
|
def generic_soup_parser(url):
    """
    Get a BeautifulSoup HTML parser for some URL.

    :param url: The URL to check.
    :type url: str
    """
    response = requests.get(url)
    return BeautifulSoup(response.content, "html.parser")
57
|
|
|
|
58
|
|
|
|
59
|
|
|
@pem_wrapper
def get_length(url):
    """
    Get content-length header from some URL.

    :param url: The URL to check.
    :type url: str
    """
    if url is None:
        return 0
    try:
        heads = requests.head(url)
        # A missing content-length header counts as unknown size (0)
        # instead of raising an uncaught KeyError
        fsize = heads.headers.get('content-length', 0)
        return int(fsize)
    except requests.ConnectionError:
        return 0
75
|
|
|
|
76
|
|
|
|
77
|
|
|
@pem_wrapper
def download(url, output_directory=None):
    """
    Download file from given URL.

    :param url: URL to download from.
    :type url: str

    :param output_directory: Download folder. Default is local.
    :type output_directory: str
    """
    if output_directory is None:
        output_directory = os.getcwd()
    lfname = url.split('/')[-1]
    sname = utilities.stripper(lfname)
    fname = os.path.join(output_directory, lfname)
    # Open the connection before creating the file, so a connection
    # failure does not leave a stray empty file behind
    req = requests.get(url, stream=True)
    # Missing content-length header is treated as 0 rather than KeyError
    clength = req.headers.get('content-length', 0)
    fsize = utilities.fsizer(clength)
    with open(fname, "wb") as outfile:
        if req.status_code == 200:  # 200 OK
            print("DOWNLOADING {0} [{1}]".format(sname, fsize))
            for chunk in req.iter_content(chunk_size=1024):
                outfile.write(chunk)
        else:
            print("ERROR: HTTP {0} IN {1}".format(req.status_code, lfname))
    # Remove files that ended up empty (failed/zero-byte downloads)
    if os.stat(fname).st_size == 0:
        os.remove(fname)
105
|
|
|
|
106
|
|
|
def download_bootstrap(urls, outdir=None, workers=5):
    """
    Run downloaders for each file in given URL iterable.

    :param urls: URLs to download.
    :type urls: list

    :param outdir: Download folder. Default is handled in :func:`download`.
    :type outdir: str

    :param workers: Number of worker processes. Default is 5.
    :type workers: int
    """
    # Don't spawn more workers than there are URLs to fetch
    workers = len(urls) if len(urls) < workers else workers
    spinman = utilities.SpinManager()
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as xec:
        try:
            # Spinner shows activity while the pool drains on context exit
            spinman.start()
            for url in urls:
                xec.submit(download, url, outdir)
        except (KeyboardInterrupt, SystemExit):
            # Interrupted: wait for running downloads, then stop the spinner
            xec.shutdown()
            spinman.stop()
    # Normal path: all submits done and executor joined; stop spinner
    spinman.stop()
    utilities.spinner_clear()
    utilities.line_begin()
132
|
|
|
|
133
|
|
|
|
134
|
|
|
@pem_wrapper
def availability(url):
    """
    Check HTTP status code of given URL.
    200 or 301-308 is OK, else is not.

    :param url: URL to check.
    :type url: str
    """
    try:
        status = int(requests.head(url).status_code)
    except requests.ConnectionError:
        return False
    return status == 200 or 300 < status <= 308
149
|
|
|
|
150
|
|
|
|
151
|
|
|
def clean_availability(results, server):
    """
    Clean availability for autolookup script.

    :param results: Result dict.
    :type results: dict(str: str)

    :param server: Server, key for result dict.
    :type server: str
    """
    marker = "PD" if server == "p" else server.upper()
    rel = results[server.lower()]
    if rel is not None and rel != "SR not in system":
        avail = marker
    else:
        avail = "  "
    return rel, avail
165
|
|
|
|
166
|
|
|
|
167
|
|
|
@pem_wrapper
def carrier_checker(mcc, mnc):
    """
    Query BlackBerry World to map a MCC and a MNC to a country and carrier.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int
    """
    url = "http://appworld.blackberry.com/ClientAPI/checkcarrier?homemcc={0}&homemnc={1}&devicevendorid=-1&pin=0".format(
        mcc, mnc)
    user_agent = {'User-agent': 'AppWorld/5.1.0.60'}
    req = requests.get(url, headers=user_agent)
    root = xml.etree.ElementTree.fromstring(req.text)
    # Default to None so a response missing the expected tags cannot
    # raise NameError at the return statement
    country = carrier = None
    for child in root:
        if child.tag == "country":
            country = child.get("name")
        if child.tag == "carrier":
            carrier = child.get("name")
    return country, carrier
189
|
|
|
|
190
|
|
|
|
191
|
|
|
def return_npc(mcc, mnc):
    """
    Format MCC and MNC into a NPC.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int
    """
    padded = [str(code).zfill(3) for code in (mcc, mnc)]
    return "{0}{1}30".format(*padded)
202
|
|
|
|
203
|
|
|
|
204
|
|
|
@pem_wrapper
def carrier_query(npc, device, upgrade=False, blitz=False, forced=None):
    """
    Query BlackBerry servers, check which update is out for a carrier.

    :param npc: MCC + MNC (see `func:return_npc`)
    :type npc: int

    :param device: Hexadecimal hardware ID.
    :type device: str

    :param upgrade: Whether to use upgrade files. False by default.
    :type upgrade: bool

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool

    :param forced: Force a software release.
    :type forced: str
    """
    upg = "upgrade" if upgrade else "repair"
    forced = "latest" if forced is None else forced
    url = "https://cs.sl.blackberry.com/cse/updateDetails/2.2/"
    # Assemble the request body from its fragments in one pass
    parts = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<updateDetailRequest version="2.2.1" authEchoTS="1366644680359">',
        "<clientProperties>",
        "<hardware>",
        "<pin>0x2FFFFFB3</pin><bsn>1128121361</bsn>",
        "<imei>004401139269240</imei>",
        "<id>0x{0}</id>".format(device),
        "</hardware>",
        "<network>",
        "<homeNPC>0x{0}</homeNPC>".format(npc),
        "<iccid>89014104255505565333</iccid>",
        "</network>",
        "<software>",
        "<currentLocale>en_US</currentLocale>",
        "<legalLocale>en_US</legalLocale>",
        "</software>",
        "</clientProperties>",
        "<updateDirectives>",
        '<allowPatching type="REDBEND">true</allowPatching>',
        "<upgradeMode>{0}</upgradeMode>".format(upg),
        "<provideDescriptions>false</provideDescriptions>",
        "<provideFiles>true</provideFiles>",
        "<queryType>NOTIFICATION_CHECK</queryType>",
        "</updateDirectives>",
        "<pollType>manual</pollType>",
        "<resultPackageSetCriteria>",
        '<softwareRelease softwareReleaseVersion="{0}" />'.format(forced),
        "<releaseIndependent>",
        '<packageType operation="include">application</packageType>',
        "</releaseIndependent>",
        "</resultPackageSetCriteria>",
        "</updateDetailRequest>",
    ]
    query = "".join(parts)
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    req = requests.post(url, headers=header, data=query)
    return parse_carrier_xml(req.text, blitz)
262
|
|
|
|
263
|
|
|
|
264
|
|
|
def carrier_swver_get(root):
    """
    Get software release from carrier XML.

    :param root: ElementTree we're barking up.
    :type root: xml.etree.ElementTree.ElementTree

    :return: Software release version, or None if no metadata element exists.
    """
    # Initialize so an XML tree without softwareReleaseMetadata
    # returns None instead of raising NameError
    swver = None
    for child in root.iter("softwareReleaseMetadata"):
        swver = child.get("softwareReleaseVersion")
    return swver
274
|
|
|
|
275
|
|
|
|
276
|
|
|
def carrier_child_fileappend(child, files, baseurl, blitz=False):
    """
    Append bar file links to a list from a child element.

    :param child: Child element in use.
    :type child: xml.etree.ElementTree.Element

    :param files: Filelist.
    :type files: list(str)

    :param baseurl: Base URL, URL minus the filename.
    :type baseurl: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    # Blitz packages exclude the core system images
    system_types = ("system:radio", "system:desktop", "system:os")
    if not blitz or child.get("type") not in system_types:
        files.append(baseurl + child.get("path"))
    return files
298
|
|
|
|
299
|
|
|
|
300
|
|
|
def carrier_child_finder(root, files, baseurl, blitz=False):
    """
    Extract filenames, radio and OS from child elements.

    :param root: ElementTree we're barking up.
    :type root: xml.etree.ElementTree.ElementTree

    :param files: Filelist.
    :type files: list(str)

    :param baseurl: Base URL, URL minus the filename.
    :type baseurl: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    osver = radver = ""
    for child in root.iter("package"):
        files = carrier_child_fileappend(child, files, baseurl, blitz)
        pkgtype = child.get("type")
        if pkgtype == "system:radio":
            radver = child.get("version")
        elif pkgtype in ("system:desktop", "system:os"):
            # desktop and os packages both carry the OS version
            osver = child.get("version")
    return osver, radver, files
326
|
|
|
|
327
|
|
|
|
328
|
|
|
def parse_carrier_xml(data, blitz=False):
    """
    Parse the response to a carrier update request and return the juicy bits.

    :param data: The data to parse.
    :type data: xml

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    root = xml.etree.ElementTree.fromstring(data)
    sw_exists = root.find('./data/content/softwareReleaseMetadata')
    if sw_exists is None:
        swver = "N/A"  # no software release metadata in response
    else:
        swver = carrier_swver_get(root)
    files = []
    osver = radver = ""
    package_exists = root.find('./data/content/fileSets/fileSet')
    if package_exists is not None:
        baseurl = "{0}/".format(package_exists.get("url"))
        osver, radver, files = carrier_child_finder(root, files, baseurl, blitz)
    return (swver, osver, radver, files)
350
|
|
|
|
351
|
|
|
|
352
|
|
|
@pem_wrapper
def sr_lookup(osver, server):
    """
    Software release lookup, with choice of server.
    :data:`bbarchivist.bbconstants.SERVERLIST` for server list.

    :param osver: OS version to lookup, 10.x.y.zzzz.
    :type osver: str

    :param server: Server to use.
    :type server: str
    """
    reg = re.compile(r"(\d{1,4}\.)(\d{1,4}\.)(\d{1,4}\.)(\d{1,4})")
    # Assemble the lookup request body from its fragments
    parts = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<srVersionLookupRequest version="2.0.0"',
        ' authEchoTS="1366644680359">',
        '<clientProperties><hardware>',
        '<pin>0x2FFFFFB3</pin><bsn>1140011878</bsn>',
        '<imei>004402242176786</imei><id>0x8D00240A</id>',
        '<isBootROMSecure>true</isBootROMSecure>',
        '</hardware>',
        '<network>',
        '<vendorId>0x0</vendorId><homeNPC>0x60</homeNPC>',
        '<currentNPC>0x60</currentNPC><ecid>0x1</ecid>',
        '</network>',
        '<software><currentLocale>en_US</currentLocale>',
        '<legalLocale>en_US</legalLocale>',
        '<osVersion>{0}</osVersion>'.format(osver),
        '<omadmEnabled>false</omadmEnabled>',
        '</software></clientProperties>',
        '</srVersionLookupRequest>',
    ]
    query = "".join(parts)
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    try:
        req = requests.post(server, headers=header, data=query, timeout=1)
    except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
        return "SR not in system"
    try:
        root = xml.etree.ElementTree.fromstring(req.text)
    except xml.etree.ElementTree.ParseError:
        return "SR not in system"
    # First package whose text looks like a version number wins
    for package in root.findall('./data/content/'):
        if package.text is not None and reg.match(package.text):
            return package.text
    return "SR not in system"
401
|
|
|
|
402
|
|
|
|
403
|
|
|
def sr_lookup_bootstrap(osv):
    """
    Run lookups for each server for given OS.

    :param osv: OS to check.
    :type osv: str
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as xec:
        try:
            # Submit every lookup before collecting results so they run
            # concurrently; the old submit-then-.result() loop was serial
            futures = {
                key: xec.submit(sr_lookup, osv, SERVERS[key])
                for key in ("p", "a1", "a2", "b1", "b2")
            }
            return {key: future.result() for key, future in futures.items()}
        except KeyboardInterrupt:
            xec.shutdown(wait=False)
424
|
|
|
|
425
|
|
|
|
426
|
|
|
@pem_wrapper
def available_bundle_lookup(mcc, mnc, device):
    """
    Check which software releases were ever released for a carrier.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int

    :param device: Hexadecimal hardware ID.
    :type device: str
    """
    server = "https://cs.sl.blackberry.com/cse/availableBundles/1.0.0/"
    npc = return_npc(mcc, mnc)
    # Assemble the request body from its fragments
    parts = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<availableBundlesRequest version="1.0.0" ',
        'authEchoTS="1366644680359">',
        '<deviceId><pin>0x2FFFFFB3</pin></deviceId>',
        '<clientProperties><hardware><id>0x{0}</id>'.format(device),
        '<isBootROMSecure>true</isBootROMSecure></hardware>',
        '<network><vendorId>0x0</vendorId><homeNPC>0x{0}</homeNPC>'.format(npc),
        '<currentNPC>0x{0}</currentNPC></network><software>'.format(npc),
        '<currentLocale>en_US</currentLocale>',
        '<legalLocale>en_US</legalLocale>',
        '<osVersion>10.0.0.0</osVersion>',
        '<radioVersion>10.0.0.0</radioVersion></software>',
        '</clientProperties><updateDirectives><bundleVersionFilter>',
        '</bundleVersionFilter></updateDirectives>',
        '</availableBundlesRequest>',
    ]
    query = "".join(parts)
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    req = requests.post(server, headers=header, data=query)
    root = xml.etree.ElementTree.fromstring(req.text)
    content = root.find('./data/content')
    return [child.attrib["version"] for child in content]
463
|
|
|
|
464
|
|
|
|
465
|
|
|
@pem_wrapper
def ptcrb_scraper(ptcrbid):
    """
    Get the PTCRB results for a given device.

    :param ptcrbid: Numerical ID from PTCRB (end of URL).
    :type ptcrbid: str
    """
    baseurl = "https://ptcrb.com/vendor/complete/view_complete_request_guest.cfm?modelid={0}".format(
        ptcrbid)
    soup = generic_soup_parser(baseurl)
    text = soup.get_text().replace("\r\n", " ")
    prelimlist = re.findall("OS .+[^\\n]", text, re.IGNORECASE)
    if not prelimlist:  # Priv
        prelimlist = re.findall(r"[A-Z]{3}[0-9]{3}[\s]", text)
    # Skip entries with stray CRLF left in; clean up the rest
    return [ptcrb_item_cleaner(entry) for entry in prelimlist
            if not entry.endswith("\r\n")]
486
|
|
|
|
487
|
|
|
|
488
|
|
|
def ptcrb_item_cleaner(item):
    """
    Cleanup poorly formatted PTCRB entries written by an intern.

    :param item: The item to clean.
    :type item: str
    """
    # Strip markup leftovers and normalize the SR marker
    for old, new in (("<td>", ""), ("</td>", ""), ("\n", ""), (" (SR", ", SR")):
        item = item.replace(old, new)
    item = re.sub(r"\s?\((.*)$", "", item)
    item = re.sub(r"\sSV.*$", "", item)
    for old, new in ((")", ""), (". ", "."), (";", ""),
                     ("version", "Version"), ("Verison", "Version")):
        item = item.replace(old, new)
    if item.count("OS") > 1:
        # Keep only the first OS segment
        item = "OS" + item.split("OS")[1]
    # Normalize field labels and spacing, in the same order as before
    for old, new in (("SR", "SW Release"), (" Version:", ":"), ("Version ", " "),
                     (":1", ": 1"), (", ", " "), ("Software", "SW"), ("  ", " "),
                     ("OS ", "OS: "), ("Radio ", "Radio: "), ("Release ", "Release: ")):
        item = item.replace(old, new)
    pieces = item.split(" ")
    if len(pieces) > 1:
        # Pad version fields to a fixed width for column alignment
        while len(pieces[1]) < 11:
            pieces[1] += " "
        while len(pieces[3]) < 11:
            pieces[3] += " "
    else:
        pieces.insert(0, "OS:")
    return " ".join(pieces).strip()
531
|
|
|
|
532
|
|
|
|
533
|
|
|
@pem_wrapper
def kernel_scraper(utils=False):
    """
    Scrape BlackBerry's GitHub kernel repo for available branches.

    :param utils: Check android-utils repo instead of android-linux-kernel. Default is False.
    :type utils: bool
    """
    repo = "android-utils" if utils else "android-linux-kernel"
    kernlist = []
    for page in range(1, 10):
        url = "https://github.com/blackberry/{0}/branches/all?page={1}".format(repo, page)
        soup = generic_soup_parser(url)
        # Stop paging once GitHub reports no more branches
        if soup.find("div", {"class": "no-results-message"}):
            break
        kernlist.extend(re.findall(r"msm[0-9]{4}\/[A-Z0-9]{6}", soup.get_text(), re.IGNORECASE))
    return kernlist
552
|
|
|
|
553
|
|
|
|
554
|
|
|
def root_generator(folder, build, variant="common"):
    """
    Generate roots for the SHAxxx hash lookup URLs.

    :param folder: Dictionary of variant: loader name pairs.
    :type folder: dict(str: str)

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param variant: Autoloader variant. Default is "common".
    :type variant: str
    """
    # Priv hash files live under a per-variant folder
    privx = "bbfoundation/hashfiles_priv/{0}".format(folder[variant])
    # DTEK50 hash location depends on the build prefix
    if build[:3] == "AAF":
        dtek50x = "bbSupport/DTEK50"
    else:
        dtek50x = "bbfoundation/hashfiles_priv/dtek50"
    return {"Priv": privx, "DTEK50": dtek50x}
574
|
|
|
|
575
|
|
|
|
576
|
|
|
def make_droid_skeleton(method, build, device, variant="common"):
    """
    Make an Android autoloader/hash URL.

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param device: Device to check.
    :type device: str

    :param variant: Autoloader variant. Default is "common".
    :type variant: str
    """
    folder = {"vzw-vzw": "verizon", "na-att": "att", "na-tmo": "tmo", "common": "default"}
    devices = {"Priv": "qc8992", "DTEK50": "qc8952_64_sfi"}
    roots = root_generator(folder, build, variant)
    base = "bbry_{2}_autoloader_user-{0}-{1}".format(variant, build.upper(), devices[device])
    if method is None:
        # Plain autoloader download link
        return "https://bbapps.download.blackberry.com/Priv/{0}.zip".format(base)
    # Hash-file link for the requested digest method
    return "http://ca.blackberry.com/content/dam/{1}/{0}.{2}sum".format(base, roots[device], method.lower())
601
|
|
|
|
602
|
|
|
|
603
|
|
|
def bulk_droid_skeletons(devs, build, method=None):
    """
    Prepare list of Android autoloader/hash URLs.

    :param devs: List of devices.
    :type devs: list(str)

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str
    """
    carrier_variants = ("common", "vzw-vzw", "na-tmo", "na-att")  # device variants
    common_variants = ("common", )  # no Americans
    carrier_devices = ("Priv", )  # may this list never expand in the future
    return [
        make_droid_skeleton(method, build, dev, var)
        for dev in devs
        for var in (carrier_variants if dev in carrier_devices else common_variants)
    ]
626
|
|
|
|
627
|
|
|
|
628
|
|
|
def prepare_droid_list(device):
    """
    Convert single devices to a list, if necessary.

    :param device: Device to check.
    :type device: str
    """
    return device if isinstance(device, list) else [device]
641
|
|
|
|
642
|
|
|
|
643
|
|
|
def droid_scanner(build, device, method=None):
    """
    Check for Android autoloaders on BlackBerry's site.

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param device: Device to check.
    :type device: str

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str
    """
    skels = bulk_droid_skeletons(prepare_droid_list(device), build, method)
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(skels)) as xec:
        # Keep only the URLs that actually resolve
        hits = [skel for skel in skels if xec.submit(availability, skel).result()]
        return hits if hits else None
665
|
|
|
|
666
|
|
|
|
667
|
|
|
def chunker(iterable, inc):
    """
    Convert an iterable into a list of inc sized lists.

    :param iterable: Iterable to chunk.
    :type iterable: list

    :param inc: Increment; how big each chunk is.
    :type inc: int
    """
    return [iterable[pos:pos + inc] for pos in range(0, len(iterable), inc)]
679
|
|
|
|
680
|
|
|
|
681
|
|
|
def unicode_filter(intext):
    """
    Remove Unicode crap.

    :param intext: Text to filter.
    :type intext: str
    """
    scrubbed = intext.replace("\u2013", "")  # drop en-dashes
    return scrubbed.strip()
689
|
|
|
|
690
|
|
|
|
691
|
|
|
def table_header_filter(ptag):
    """
    Validate p tag, to see if it's relevant.

    :param ptag: P tag.
    :type ptag: bs4.element.Tag
    """
    # Relevant headers are bolded, mention BlackBerry, and are not
    # the "experts" blurb; use the idiomatic `not in` operator
    valid = ptag.find("b") and "BlackBerry" in ptag.text and "experts" not in ptag.text
    return valid
700
|
|
|
|
701
|
|
|
|
702
|
|
|
def table_headers(pees):
    """
    Generate table headers from list of p tags.

    :param pees: List of p tags.
    :type pees: list(bs4.element.Tag)
    """
    return [ptag.text for ptag in pees if table_header_filter(ptag)]
711
|
|
|
|
712
|
|
|
|
713
|
|
|
@pem_wrapper
def loader_page_scraper():
    """
    Return scraped autoloader page.
    """
    url = "http://ca.blackberry.com/content/blackberry-com/en_ca/support/smartphones/Android-OS-Reload.html"
    soup = generic_soup_parser(url)
    tables = soup.find_all("table")
    # Each <p> header introduces the table that follows it
    headers = table_headers(soup.find_all("p"))
    for idx, table in enumerate(tables):
        print("~~~{0}~~~".format(headers[idx]))
        # Table rows come in groups of 4 cells: device, version, link, extra
        chunks = chunker(table.find_all("td"), 4)
        for chunk in chunks:
            key = unicode_filter(chunk[0].text)
            ver = unicode_filter(chunk[1].text)
            link = unicode_filter(chunk[2].find("a")["href"])
            print("{0}\n    {1}: {2}".format(key, ver, link))
        print(" ")
731
|
|
|
|
732
|
|
|
|
733
|
|
|
@pem_wrapper
def base_metadata(url):
    """
    Get BBNDK metadata, base function.
    """
    # Metadata is CSV-ish lines; the second field is the version
    lines = requests.get(url).content.split(b"\n")
    return [line.split(b",")[1].decode("utf-8") for line in lines if line]
743
|
|
|
|
744
|
|
|
|
745
|
|
|
def ndk_metadata():
    """
    Get BBNDK target metadata.
    """
    series = ("10.0", "10.1", "10.2")
    data = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/metadata")
    return [entry for entry in data if entry.startswith(series)]
752
|
|
|
|
753
|
|
|
|
754
|
|
|
def sim_metadata():
    """
    Get BBNDK simulator metadata.
    """
    return base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/simulator/simulator_metadata")
760
|
|
|
|
761
|
|
|
|
762
|
|
|
def runtime_metadata():
    """
    Get BBNDK runtime metadata.
    """
    return base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/runtime/runtime_metadata")
768
|
|
|
|
769
|
|
|
|
770
|
|
|
def series_generator(osversion):
    """
    Generate series/branch name from OS version.

    :param osversion: OS version.
    :type osversion: str
    """
    major, minor, patch = osversion.split(".")[:3]
    return "BB{0}_{1}_{2}".format(major, minor, patch)
779
|
|
|
|
780
|
|
|
|
781
|
|
|
@pem_wrapper
def devalpha_urls(osversion, skel):
    """
    Check individual Dev Alpha autoloader URLs.

    :param osversion: OS version.
    :type osversion: str

    :param skel: Individual skeleton format to try.
    :type skel: str
    """
    url = "http://downloads.blackberry.com/upr/developers/downloads/{0}{1}.exe".format(skel, osversion)
    req = requests.head(url)
    if req.status_code != 200:
        return ()  # empty tuple signals "not found"
    return (url, req.headers["content-length"])
799
|
|
|
|
800
|
|
|
|
801
|
|
|
def devalpha_urls_bootstrap(osversion, skeletons):
    """
    Get list of valid Dev Alpha autoloader URLs.

    :param osversion: OS version.
    :type osversion: str

    :param skeletons: List of skeleton formats to try.
    :type skeletons: list
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as xec:
        try:
            finals = {}
            # Copy before substituting placeholders so the caller's
            # list is not mutated in place
            skels = list(skeletons)
            for idx, skel in enumerate(skels):
                if "<SERIES>" in skel:
                    skels[idx] = skel.replace("<SERIES>", series_generator(osversion))
            for skel in skels:
                final = xec.submit(devalpha_urls, osversion, skel).result()
                if final:
                    finals[final[0]] = final[1]
            return finals
        except KeyboardInterrupt:
            xec.shutdown(wait=False)
825
|
|
|
|
826
|
|
|
|
827
|
|
|
def dev_dupe_cleaner(finals):
    """
    Clean duplicate autoloader entries.

    :param finals: Dict of URL:content-length pairs.
    :type finals: dict(str: str)
    """
    # Group URLs by content-length; same size => duplicate payload
    by_size = {}
    for url, size in finals.items():
        by_size.setdefault(size, set()).add(url)
    for urls in by_size.values():
        if len(urls) > 1:
            # Prefer non-DevAlpha names when duplicates exist
            for url in urls:
                if "DevAlpha" in url:
                    del finals[url]
    return finals
843
|
|
|
|