#!/usr/bin/env python3
"""This module is used for network connections; APIs, downloading, etc."""

import os  # filesystem read
import xml.etree.ElementTree  # XML parsing
import re  # regexes
import hashlib  # base url creation
import concurrent.futures  # multiprocessing/threading
import glob  # pem file lookup
import requests  # downloading
from bs4 import BeautifulSoup  # scraping
from bbarchivist import utilities  # parse filesize
from bbarchivist.bbconstants import SERVERS  # lookup servers

__author__ = "Thurask"
__license__ = "WTFPL v2"
__copyright__ = "Copyright 2015-2016 Thurask"


def grab_pem():
    """
    Work with either local cacerts or system cacerts. Since cx_freeze is dumb.
    """
    try:
        pemfile = glob.glob(os.path.join(os.getcwd(), "cacert.pem"))[0]
    except IndexError:
        return requests.certs.where()  # no local cacerts
    else:
        return os.path.abspath(pemfile)  # local cacerts


def pem_wrapper(method):
    """
    Decorator to set REQUESTS_CA_BUNDLE.

    :param method: Method to use.
    :type method: function
    """
    def wrapper(*args, **kwargs):
        """
        Set REQUESTS_CA_BUNDLE before doing function.
        """
        os.environ["REQUESTS_CA_BUNDLE"] = grab_pem()
        return method(*args, **kwargs)
    return wrapper


def generic_soup_parser(url):
    """
    Get a BeautifulSoup HTML parser for some URL.

    :param url: The URL to check.
    :type url: str
    """
    req = requests.get(url)
    soup = BeautifulSoup(req.content, "html.parser")
    return soup


@pem_wrapper
def get_length(url):
    """
    Get content-length header from some URL.

    :param url: The URL to check.
    :type url: str
    """
    if url is None:
        return 0
    try:
        heads = requests.head(url)
        fsize = heads.headers['content-length']
        return int(fsize)
    except requests.ConnectionError:
        return 0


@pem_wrapper
def download(url, output_directory=None):
    """
    Download file from given URL.

    :param url: URL to download from.
    :type url: str

    :param output_directory: Download folder. Default is local.
    :type output_directory: str
    """
    if output_directory is None:
        output_directory = os.getcwd()
    lfname = url.split('/')[-1]
    sname = utilities.stripper(lfname)
    fname = os.path.join(output_directory, lfname)
    with open(fname, "wb") as file:
        req = requests.get(url, stream=True)
        clength = req.headers['content-length']
        fsize = utilities.fsizer(clength)
        if req.status_code == 200:  # 200 OK
            print("DOWNLOADING {0} [{1}]".format(sname, fsize))
            for chunk in req.iter_content(chunk_size=1024):
                file.write(chunk)
        else:
            print("ERROR: HTTP {0} IN {1}".format(req.status_code, lfname))
    if os.stat(fname).st_size == 0:
        os.remove(fname)


def download_bootstrap(urls, outdir=None, workers=5):
    """
    Run downloaders for each file in given URL iterable.

    :param urls: URLs to download.
    :type urls: list

    :param outdir: Download folder. Default is handled in :func:`download`.
    :type outdir: str

    :param workers: Number of worker processes. Default is 5.
    :type workers: int
    """
    workers = len(urls) if len(urls) < workers else workers
    spinman = utilities.SpinManager()
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as xec:
        try:
            spinman.start()
            for url in urls:
                xec.submit(download, url, outdir)
        except (KeyboardInterrupt, SystemExit):
            xec.shutdown()
            spinman.stop()
    spinman.stop()
    utilities.spinner_clear()
    utilities.line_begin()
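
# Illustrative usage sketch for download_bootstrap; the URLs and output folder
# here are hypothetical placeholders, not real endpoints:
#   download_bootstrap(["http://example.com/os.bar", "http://example.com/radio.bar"], outdir="bars")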


def create_base_url(softwareversion):
    """
    Make the root URL for production server files.

    :param softwareversion: Software version to hash.
    :type softwareversion: str
    """
    # Hash software version
    swhash = hashlib.sha1(softwareversion.encode('utf-8'))
    hashedsoftwareversion = swhash.hexdigest()
    # Root of all urls
    baseurl = "http://cdn.fs.sl.blackberry.com/fs/qnx/production/" + hashedsoftwareversion
    return baseurl
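
# For example, create_base_url("10.3.2.2639") returns the production root plus
# the SHA-1 hex digest of that version string, i.e.
# "http://cdn.fs.sl.blackberry.com/fs/qnx/production/<sha1 of '10.3.2.2639'>".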


@pem_wrapper
def availability(url):
    """
    Check HTTP status code of given URL.
    200 or 301-308 is OK, else is not.

    :param url: URL to check.
    :type url: str
    """
    try:
        avlty = requests.head(url)
        status = int(avlty.status_code)
        return status == 200 or 300 < status <= 308
    except requests.ConnectionError:
        return False


def clean_availability(results, server):
    """
    Clean availability for autolookup script.

    :param results: Result dict.
    :type results: dict(str: str)

    :param server: Server, key for result dict.
    :type server: str
    """
    marker = "PD" if server == "p" else server.upper()
    rel = results[server.lower()]
    avail = marker if rel != "SR not in system" and rel is not None else " "
    return rel, avail
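
# For example, clean_availability({"p": "10.3.2.2639"}, "p") returns
# ("10.3.2.2639", "PD"), while a "SR not in system" result returns a blank marker.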


@pem_wrapper
def carrier_checker(mcc, mnc):
    """
    Query BlackBerry World to map a MCC and a MNC to a country and carrier.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int
    """
    url = "http://appworld.blackberry.com/ClientAPI/checkcarrier?homemcc={0}&homemnc={1}&devicevendorid=-1&pin=0".format(
        mcc, mnc)
    user_agent = {'User-agent': 'AppWorld/5.1.0.60'}
    req = requests.get(url, headers=user_agent)
    root = xml.etree.ElementTree.fromstring(req.text)
    for child in root:
        if child.tag == "country":
            country = child.get("name")
        if child.tag == "carrier":
            carrier = child.get("name")
    return country, carrier


def return_npc(mcc, mnc):
    """
    Format MCC and MNC into a NPC.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int
    """
    return "{0}{1}30".format(str(mcc).zfill(3), str(mnc).zfill(3))
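
# For example, return_npc(302, 220) returns "30222030": both codes are
# zero-padded to three digits and "30" is appended.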


@pem_wrapper
def carrier_query(npc, device, upgrade=False, blitz=False, forced=None):
    """
    Query BlackBerry servers, check which update is out for a carrier.

    :param npc: MCC + MNC (see :func:`return_npc`)
    :type npc: int

    :param device: Hexadecimal hardware ID.
    :type device: str

    :param upgrade: Whether to use upgrade files. False by default.
    :type upgrade: bool

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool

    :param forced: Force a software release.
    :type forced: str
    """
    upg = "upgrade" if upgrade else "repair"
    forced = "latest" if forced is None else forced
    url = "https://cs.sl.blackberry.com/cse/updateDetails/2.2/"
    query = '<?xml version="1.0" encoding="UTF-8"?>'
    query += '<updateDetailRequest version="2.2.1" authEchoTS="1366644680359">'
    query += "<clientProperties>"
    query += "<hardware>"
    query += "<pin>0x2FFFFFB3</pin><bsn>1128121361</bsn>"
    query += "<imei>004401139269240</imei>"
    query += "<id>0x{0}</id>".format(device)
    query += "</hardware>"
    query += "<network>"
    query += "<homeNPC>0x{0}</homeNPC>".format(npc)
    query += "<iccid>89014104255505565333</iccid>"
    query += "</network>"
    query += "<software>"
    query += "<currentLocale>en_US</currentLocale>"
    query += "<legalLocale>en_US</legalLocale>"
    query += "</software>"
    query += "</clientProperties>"
    query += "<updateDirectives>"
    query += '<allowPatching type="REDBEND">true</allowPatching>'
    query += "<upgradeMode>{0}</upgradeMode>".format(upg)
    query += "<provideDescriptions>false</provideDescriptions>"
    query += "<provideFiles>true</provideFiles>"
    query += "<queryType>NOTIFICATION_CHECK</queryType>"
    query += "</updateDirectives>"
    query += "<pollType>manual</pollType>"
    query += "<resultPackageSetCriteria>"
    query += '<softwareRelease softwareReleaseVersion="{0}" />'.format(forced)
    query += "<releaseIndependent>"
    query += '<packageType operation="include">application</packageType>'
    query += "</releaseIndependent>"
    query += "</resultPackageSetCriteria>"
    query += "</updateDetailRequest>"
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    req = requests.post(url, headers=header, data=query)
    return parse_carrier_xml(req.text, blitz)


def carrier_swver_get(root):
    """
    Get software release from carrier XML.

    :param root: ElementTree we're barking up.
    :type root: xml.etree.ElementTree.ElementTree
    """
    for child in root.iter("softwareReleaseMetadata"):
        swver = child.get("softwareReleaseVersion")
    return swver


def carrier_child_fileappend(child, files, baseurl, blitz=False):
    """
    Append bar file links to a list from a child element.

    :param child: Child element in use.
    :type child: xml.etree.ElementTree.Element

    :param files: Filelist.
    :type files: list(str)

    :param baseurl: Base URL, URL minus the filename.
    :type baseurl: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    if not blitz:
        files.append(baseurl + child.get("path"))
    else:
        if child.get("type") not in ["system:radio", "system:desktop", "system:os"]:
            files.append(baseurl + child.get("path"))
    return files


def carrier_child_finder(root, files, baseurl, blitz=False):
    """
    Extract filenames, radio and OS from child elements.

    :param root: ElementTree we're barking up.
    :type root: xml.etree.ElementTree.ElementTree

    :param files: Filelist.
    :type files: list(str)

    :param baseurl: Base URL, URL minus the filename.
    :type baseurl: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    osver = radver = ""
    for child in root.iter("package"):
        files = carrier_child_fileappend(child, files, baseurl, blitz)
        if child.get("type") == "system:radio":
            radver = child.get("version")
        elif child.get("type") == "system:desktop":
            osver = child.get("version")
        elif child.get("type") == "system:os":
            osver = child.get("version")
    return osver, radver, files


def parse_carrier_xml(data, blitz=False):
    """
    Parse the response to a carrier update request and return the juicy bits.

    :param data: The XML data to parse.
    :type data: str

    :param blitz: Whether or not to create a blitz package. False by default.
    :type blitz: bool
    """
    root = xml.etree.ElementTree.fromstring(data)
    sw_exists = root.find('./data/content/softwareReleaseMetadata')
    swver = "N/A" if sw_exists is None else ""
    if sw_exists is not None:
        swver = carrier_swver_get(root)
    files = []
    package_exists = root.find('./data/content/fileSets/fileSet')
    osver = radver = ""
    if package_exists is not None:
        baseurl = "{0}/".format(package_exists.get("url"))
        osver, radver, files = carrier_child_finder(root, files, baseurl, blitz)
    return swver, osver, radver, files
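
# A successful carrier query thus yields a 4-tuple along the lines of
# (software release, OS version, radio version, [bar file URLs]); the concrete
# values depend entirely on the server response.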


@pem_wrapper
def sr_lookup(osver, server):
    """
    Software release lookup, with choice of server.
    :data:`bbarchivist.bbconstants.SERVERLIST` for server list.

    :param osver: OS version to lookup, 10.x.y.zzzz.
    :type osver: str

    :param server: Server to use.
    :type server: str
    """
    reg = re.compile(r"(\d{1,4}\.)(\d{1,4}\.)(\d{1,4}\.)(\d{1,4})")
    query = '<?xml version="1.0" encoding="UTF-8"?>'
    query += '<srVersionLookupRequest version="2.0.0"'
    query += ' authEchoTS="1366644680359">'
    query += '<clientProperties><hardware>'
    query += '<pin>0x2FFFFFB3</pin><bsn>1140011878</bsn>'
    query += '<imei>004402242176786</imei><id>0x8D00240A</id>'
    query += '<isBootROMSecure>true</isBootROMSecure>'
    query += '</hardware>'
    query += '<network>'
    query += '<vendorId>0x0</vendorId><homeNPC>0x60</homeNPC>'
    query += '<currentNPC>0x60</currentNPC><ecid>0x1</ecid>'
    query += '</network>'
    query += '<software><currentLocale>en_US</currentLocale>'
    query += '<legalLocale>en_US</legalLocale>'
    query += '<osVersion>{0}</osVersion>'.format(osver)
    query += '<omadmEnabled>false</omadmEnabled>'
    query += '</software></clientProperties>'
    query += '</srVersionLookupRequest>'
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    try:
        req = requests.post(server, headers=header, data=query, timeout=1)
    except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
        return "SR not in system"
    try:
        root = xml.etree.ElementTree.fromstring(req.text)
    except xml.etree.ElementTree.ParseError:
        return "SR not in system"
    else:
        packages = root.findall('./data/content/')
        for package in packages:
            if package.text is not None:
                match = reg.match(package.text)
                if match:
                    return package.text
                else:
                    return "SR not in system"


def sr_lookup_bootstrap(osv):
    """
    Run lookups for each server for given OS.

    :param osv: OS to check.
    :type osv: str
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as xec:
        try:
            results = {
                "p": None,
                "a1": None,
                "a2": None,
                "b1": None,
                "b2": None
            }
            for key in results:
                results[key] = xec.submit(sr_lookup, osv, SERVERS[key]).result()
            return results
        except KeyboardInterrupt:
            xec.shutdown(wait=False)


@pem_wrapper
def available_bundle_lookup(mcc, mnc, device):
    """
    Check which software releases were ever released for a carrier.

    :param mcc: Country code.
    :type mcc: int

    :param mnc: Network code.
    :type mnc: int

    :param device: Hexadecimal hardware ID.
    :type device: str
    """
    server = "https://cs.sl.blackberry.com/cse/availableBundles/1.0.0/"
    npc = return_npc(mcc, mnc)
    query = '<?xml version="1.0" encoding="UTF-8"?>'
    query += '<availableBundlesRequest version="1.0.0" '
    query += 'authEchoTS="1366644680359">'
    query += '<deviceId><pin>0x2FFFFFB3</pin></deviceId>'
    query += '<clientProperties><hardware><id>0x{0}</id>'.format(device)
    query += '<isBootROMSecure>true</isBootROMSecure></hardware>'
    query += '<network><vendorId>0x0</vendorId><homeNPC>0x{0}</homeNPC>'.format(npc)
    query += '<currentNPC>0x{0}</currentNPC></network><software>'.format(npc)
    query += '<currentLocale>en_US</currentLocale>'
    query += '<legalLocale>en_US</legalLocale>'
    query += '<osVersion>10.0.0.0</osVersion>'
    query += '<radioVersion>10.0.0.0</radioVersion></software>'
    query += '</clientProperties><updateDirectives><bundleVersionFilter>'
    query += '</bundleVersionFilter></updateDirectives>'
    query += '</availableBundlesRequest>'
    header = {"Content-Type": "text/xml;charset=UTF-8"}
    req = requests.post(server, headers=header, data=query)
    root = xml.etree.ElementTree.fromstring(req.text)
    package = root.find('./data/content')
    bundlelist = [child.attrib["version"] for child in package]
    return bundlelist


@pem_wrapper
def ptcrb_scraper(ptcrbid):
    """
    Get the PTCRB results for a given device.

    :param ptcrbid: Numerical ID from PTCRB (end of URL).
    :type ptcrbid: str
    """
    baseurl = "https://ptcrb.com/vendor/complete/view_complete_request_guest.cfm?modelid={0}".format(
        ptcrbid)
    soup = generic_soup_parser(baseurl)
    text = soup.get_text()
    text = text.replace("\r\n", " ")
    prelimlist = re.findall("OS .+[^\\n]", text, re.IGNORECASE)
    if not prelimlist:  # Priv
        prelimlist = re.findall(r"[A-Z]{3}[0-9]{3}[\s]", text)
    cleanlist = []
    for item in prelimlist:
        if not item.endswith("\r\n"):  # they should hire QC people...
            cleanlist.append(ptcrb_item_cleaner(item))
    return cleanlist


def ptcrb_item_cleaner(item):
    """
    Cleanup poorly formatted PTCRB entries written by an intern.

    :param item: The item to clean.
    :type item: str
    """
    item = item.replace("<td>", "")
    item = item.replace("</td>", "")
    item = item.replace("\n", "")
    item = item.replace(" (SR", ", SR")
    item = re.sub(r"\s?\((.*)$", "", item)
    item = re.sub(r"\sSV.*$", "", item)
    item = item.replace(")", "")
    item = item.replace(". ", ".")
    item = item.replace(";", "")
    item = item.replace("version", "Version")
    item = item.replace("Verison", "Version")
    if item.count("OS") > 1:
        templist = item.split("OS")
        templist[0] = "OS"
        item = "".join([templist[0], templist[1]])
    item = item.replace("SR", "SW Release")
    item = item.replace(" Version:", ":")
    item = item.replace("Version ", " ")
    item = item.replace(":1", ": 1")
    item = item.replace(", ", " ")
    item = item.replace("Software", "SW")
    item = item.replace("  ", " ")
    item = item.replace("OS ", "OS: ")
    item = item.replace("Radio ", "Radio: ")
    item = item.replace("Release ", "Release: ")
    spaclist = item.split(" ")
    if len(spaclist) > 1:
        while len(spaclist[1]) < 11:
            spaclist[1] += " "
        while len(spaclist[3]) < 11:
            spaclist[3] += " "
    else:
        spaclist.insert(0, "OS:")
    item = " ".join(spaclist)
    item = item.strip()
    return item


@pem_wrapper
def kernel_scraper(utils=False):
    """
    Scrape BlackBerry's GitHub kernel repo for available branches.

    :param utils: Check android-utils repo instead of android-linux-kernel. Default is False.
    :type utils: bool
    """
    repo = "android-utils" if utils else "android-linux-kernel"
    kernlist = []
    for page in range(1, 10):
        url = "https://github.com/blackberry/{0}/branches/all?page={1}".format(repo, page)
        soup = generic_soup_parser(url)
        if soup.find("div", {"class": "no-results-message"}):
            break
        else:
            text = soup.get_text()
            kernlist.extend(re.findall(r"msm[0-9]{4}\/[A-Z0-9]{6}", text, re.IGNORECASE))
    return kernlist


def root_generator(folder, build, variant="common"):
    """
    Generate roots for the SHAxxx hash lookup URLs.

    :param folder: Dictionary of variant: loader name pairs.
    :type folder: dict(str: str)

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param variant: Autoloader variant. Default is "common".
    :type variant: str
    """
    # Priv specific
    privx = "bbfoundation/hashfiles_priv/{0}".format(folder[variant])
    # DTEK50 specific
    dtek50x = "bbSupport/DTEK50" if build[:3] == "AAF" else "bbfoundation/hashfiles_priv/dtek50"
    # Pack it up
    roots = {"Priv": privx, "DTEK50": dtek50x}
    return roots


def make_droid_skeleton(method, build, device, variant="common"):
    """
    Make an Android autoloader/hash URL.

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param device: Device to check.
    :type device: str

    :param variant: Autoloader variant. Default is "common".
    :type variant: str
    """
    folder = {"vzw-vzw": "verizon", "na-att": "att", "na-tmo": "tmo", "common": "default"}
    devices = {"Priv": "qc8992", "DTEK50": "qc8952_64_sfi"}
    roots = root_generator(folder, build, variant)
    base = "bbry_{2}_autoloader_user-{0}-{1}".format(variant, build.upper(), devices[device])
    if method is None:
        skel = "https://bbapps.download.blackberry.com/Priv/{0}.zip".format(base)
    else:
        skel = "http://ca.blackberry.com/content/dam/{1}/{0}.{2}sum".format(base, roots[device], method.lower())
    return skel
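
# For example, make_droid_skeleton(None, "AAD250", "Priv") builds
# "https://bbapps.download.blackberry.com/Priv/bbry_qc8992_autoloader_user-common-AAD250.zip";
# passing "sha256" instead of None yields the matching .sha256sum URL under the hash root.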


def bulk_droid_skeletons(devs, build, method=None):
    """
    Prepare list of Android autoloader/hash URLs.

    :param devs: List of devices.
    :type devs: list(str)

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str
    """
    carrier_variants = ("common", "vzw-vzw", "na-tmo", "na-att")  # device variants
    common_variants = ("common", )  # no Americans
    carrier_devices = ("Priv", )  # may this list never expand in the future
    skels = []
    for dev in devs:
        varlist = carrier_variants if dev in carrier_devices else common_variants
        for var in varlist:
            skel = make_droid_skeleton(method, build, dev, var)
            skels.append(skel)
    return skels


def prepare_droid_list(device):
    """
    Convert single devices to a list, if necessary.

    :param device: Device to check.
    :type device: str
    """
    if isinstance(device, list):
        devs = device
    else:
        devs = [device]
    return devs


def droid_scanner(build, device, method=None):
    """
    Check for Android autoloaders on BlackBerry's site.

    :param build: Build to check, 3 letters + 3 numbers.
    :type build: str

    :param device: Device to check.
    :type device: str

    :param method: None for regular OS links, "sha256/512" for SHA256 or 512 hash.
    :type method: str
    """
    devs = prepare_droid_list(device)
    skels = bulk_droid_skeletons(devs, build, method)
    with concurrent.futures.ThreadPoolExecutor(max_workers=len(skels)) as xec:
        results = []
        for skel in skels:
            avail = xec.submit(availability, skel)
            if avail.result():
                results.append(skel)
    return results if results else None


def chunker(iterable, inc):
    """
    Convert an iterable into a list of inc sized lists.

    :param iterable: Iterable to chunk.
    :type iterable: list

    :param inc: Increment; how big each chunk is.
    :type inc: int
    """
    chunks = [iterable[x:x+inc] for x in range(0, len(iterable), inc)]
    return chunks
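
# For example, chunker([1, 2, 3, 4, 5], 2) returns [[1, 2], [3, 4], [5]];
# the final chunk simply holds whatever is left over.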


def unicode_filter(intext):
    """
    Remove Unicode crap.

    :param intext: Text to filter.
    :type intext: str
    """
    return intext.replace("\u2013", "").strip()
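
# For example, unicode_filter("\u2013 10.3.2.2639 \u2013") returns "10.3.2.2639":
# en dashes are dropped and surrounding whitespace is stripped.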


def table_headers(pees):
    """
    Generate table headers from list of p tags.

    :param pees: List of p tags.
    :type pees: list(bs4.element.Tag)
    """
    bolds = [x for x in pees if x.find("b") and "BlackBerry" in x.text and "experts" not in x.text]
    bolds = [x.text for x in bolds]
    return bolds


@pem_wrapper
def loader_page_scraper():
    """
    Return scraped autoloader page.
    """
    url = "http://ca.blackberry.com/content/blackberry-com/en_ca/support/smartphones/Android-OS-Reload.html"
    soup = generic_soup_parser(url)
    tables = soup.find_all("table")
    headers = table_headers(soup.find_all("p"))
    for idx, table in enumerate(tables):
        print("~~~{0}~~~".format(headers[idx]))
        chunks = chunker(table.find_all("td"), 4)
        for chunk in chunks:
            key = unicode_filter(chunk[0].text)
            ver = unicode_filter(chunk[1].text)
            link = unicode_filter(chunk[2].find("a")["href"])
            print("{0}\n {1}: {2}".format(key, ver, link))
        print(" ")


@pem_wrapper
def base_metadata(url):
    """
    Get BBNDK metadata, base function.
    """
    req = requests.get(url)
    data = req.content
    entries = data.split(b"\n")
    metadata = [entry.split(b",")[1].decode("utf-8") for entry in entries if entry]
    return metadata


def ndk_metadata():
    """
    Get BBNDK target metadata.
    """
    data = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/metadata")
    metadata = [entry for entry in data if entry.startswith(("10.0", "10.1", "10.2"))]
    return metadata


def sim_metadata():
    """
    Get BBNDK simulator metadata.
    """
    metadata = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/simulator/simulator_metadata")
    return metadata


def runtime_metadata():
    """
    Get BBNDK runtime metadata.
    """
    metadata = base_metadata("http://downloads.blackberry.com/upr/developers/update/bbndk/runtime/runtime_metadata")
    return metadata


def series_generator(osversion):
    """
    Generate series/branch name from OS version.

    :param osversion: OS version.
    :type osversion: str
    """
    splits = osversion.split(".")
    return "BB{0}_{1}_{2}".format(*splits[0:3])
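
# For example, series_generator("10.3.2.2639") returns "BB10_3_2": only the
# first three fields of the OS version make it into the branch name.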


@pem_wrapper
def devalpha_urls(osversion, skel):
    """
    Check individual Dev Alpha autoloader URLs.

    :param osversion: OS version.
    :type osversion: str

    :param skel: Individual skeleton format to try.
    :type skel: str
    """
    url = "http://downloads.blackberry.com/upr/developers/downloads/{0}{1}.exe".format(skel, osversion)
    req = requests.head(url)
    if req.status_code == 200:
        finals = (url, req.headers["content-length"])
    else:
        finals = ()
    return finals


def devalpha_urls_bootstrap(osversion, skeletons):
    """
    Get list of valid Dev Alpha autoloader URLs.

    :param osversion: OS version.
    :type osversion: str

    :param skeletons: List of skeleton formats to try.
    :type skeletons: list
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as xec:
        try:
            finals = {}
            skels = skeletons
            for idx, skel in enumerate(skeletons):
                if "<SERIES>" in skel:
                    skels[idx] = skel.replace("<SERIES>", series_generator(osversion))
            for skel in skels:
                final = xec.submit(devalpha_urls, osversion, skel).result()
                if final:
                    finals[final[0]] = final[1]
            return finals
        except KeyboardInterrupt:
            xec.shutdown(wait=False)


def dev_dupe_cleaner(finals):
    """
    Clean duplicate autoloader entries.

    :param finals: Dict of URL:content-length pairs.
    :type finals: dict(str: str)
    """
    revo = {}
    for key, val in finals.items():
        revo.setdefault(val, set()).add(key)
    dupelist = [val for key, val in revo.items() if len(val) > 1]
    for dupe in dupelist:
        for entry in dupe:
            if "DevAlpha" in entry:
                del finals[entry]
    return finals
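
# Illustrative behaviour (hypothetical keys): given two URLs sharing one
# content-length, say {"Z10_DevAlphaC.exe": "100", "Z10_Production.exe": "100"},
# the DevAlpha entry is dropped and only the non-DevAlpha URL survives.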