1
|
|
|
# -*- coding: utf-8 -*- |
2
|
|
|
# |
3
|
|
|
# pylast - |
4
|
|
|
# A Python interface to Last.fm and Libre.fm |
5
|
|
|
# |
6
|
|
|
# Copyright 2008-2010 Amr Hassan |
7
|
|
|
# Copyright 2013-2016 hugovk |
8
|
|
|
# |
9
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
10
|
|
|
# you may not use this file except in compliance with the License. |
11
|
|
|
# You may obtain a copy of the License at |
12
|
|
|
# |
13
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
14
|
|
|
# |
15
|
|
|
# Unless required by applicable law or agreed to in writing, software |
16
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
17
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
18
|
|
|
# See the License for the specific language governing permissions and |
19
|
|
|
# limitations under the License. |
20
|
|
|
# |
21
|
|
|
# https://github.com/pylast/pylast |
22
|
|
|
|
23
|
|
|
import hashlib |
24
|
|
|
from xml.dom import minidom, Node |
25
|
|
|
import xml.dom |
26
|
|
|
import time |
27
|
|
|
import shelve |
28
|
|
|
import tempfile |
29
|
|
|
import sys |
30
|
|
|
import collections |
31
|
|
|
import warnings |
32
|
|
|
import re |
33
|
|
|
import six |
34
|
|
|
|
35
|
|
|
# Package metadata consumed by setup tooling and introspection.
__version__ = '1.6.0'
__author__ = 'Amr Hassan, hugovk'
__copyright__ = "Copyright (C) 2008-2010 Amr Hassan, 2013-2016 hugovk"
__license__ = "apache2"
__email__ = '[email protected]'
40
|
|
|
|
41
|
|
|
|
42
|
|
|
def _deprecation_warning(message): |
43
|
|
|
warnings.warn(message, DeprecationWarning) |
44
|
|
|
|
45
|
|
|
# Python 2/3 compatibility: alias the stdlib names that were renamed or
# moved in Python 3 so the rest of the module can use a single spelling.
if sys.version_info[0] == 3:
    from http.client import HTTPConnection
    import html.entities as htmlentitydefs
    from urllib.parse import splithost as url_split_host
    from urllib.parse import quote_plus as url_quote_plus

    # chr() is already Unicode-aware on Python 3; provide the Python 2
    # spelling used below when building RE_XML_ILLEGAL.
    unichr = chr

elif sys.version_info[0] == 2:
    from httplib import HTTPConnection
    import htmlentitydefs
    from urllib import splithost as url_split_host
    from urllib import quote_plus as url_quote_plus
58
|
|
|
|
59
|
|
|
# Error status codes returned by the web service.
STATUS_INVALID_SERVICE = 2
STATUS_INVALID_METHOD = 3
STATUS_AUTH_FAILED = 4
STATUS_INVALID_FORMAT = 5
STATUS_INVALID_PARAMS = 6
STATUS_INVALID_RESOURCE = 7
STATUS_TOKEN_ERROR = 8
STATUS_INVALID_SK = 9
STATUS_INVALID_API_KEY = 10
STATUS_OFFLINE = 11
STATUS_SUBSCRIBERS_ONLY = 12
STATUS_INVALID_SIGNATURE = 13
STATUS_TOKEN_UNAUTHORIZED = 14
STATUS_TOKEN_EXPIRED = 15
73
|
|
|
|
74
|
|
|
# Event attendance statuses (sent as strings to the web service).
EVENT_ATTENDING = '0'
EVENT_MAYBE_ATTENDING = '1'
EVENT_NOT_ATTENDING = '2'
77
|
|
|
|
78
|
|
|
# Chart time-period identifiers accepted by the web service.
PERIOD_OVERALL = 'overall'
PERIOD_7DAYS = '7day'
PERIOD_1MONTH = '1month'
PERIOD_3MONTHS = '3month'
PERIOD_6MONTHS = '6month'
PERIOD_12MONTHS = '12month'
84
|
|
|
|
85
|
|
|
# Keys into a network's domain_names mapping (localized site domains).
DOMAIN_ENGLISH = 0
DOMAIN_GERMAN = 1
DOMAIN_SPANISH = 2
DOMAIN_FRENCH = 3
DOMAIN_ITALIAN = 4
DOMAIN_POLISH = 5
DOMAIN_PORTUGUESE = 6
DOMAIN_SWEDISH = 7
DOMAIN_TURKISH = 8
DOMAIN_RUSSIAN = 9
DOMAIN_JAPANESE = 10
DOMAIN_CHINESE = 11
97
|
|
|
|
98
|
|
|
# Cover-art size indices, smallest to largest.
COVER_SMALL = 0
COVER_MEDIUM = 1
COVER_LARGE = 2
COVER_EXTRA_LARGE = 3
COVER_MEGA = 4
103
|
|
|
|
104
|
|
|
# Sort orders for image listings.
IMAGES_ORDER_POPULARITY = "popularity"
IMAGES_ORDER_DATE = "dateadded"
106
|
|
|
|
107
|
|
|
|
108
|
|
|
# User gender values as reported by the web service.
USER_MALE = 'Male'
USER_FEMALE = 'Female'
110
|
|
|
|
111
|
|
|
# Scrobble source flags (single-letter codes used by the submissions API).
SCROBBLE_SOURCE_USER = "P"
SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST = "R"
SCROBBLE_SOURCE_PERSONALIZED_BROADCAST = "E"
SCROBBLE_SOURCE_LASTFM = "L"
SCROBBLE_SOURCE_UNKNOWN = "U"
116
|
|
|
|
117
|
|
|
# Scrobble rating flags; empty string means a plain play.
SCROBBLE_MODE_PLAYED = ""
SCROBBLE_MODE_LOVED = "L"
SCROBBLE_MODE_BANNED = "B"
SCROBBLE_MODE_SKIPPED = "S"
121
|
|
|
|
122
|
|
|
# Regex matching characters that are illegal in XML 1.0 documents
# (control characters, lone surrogates and U+FFFE/U+FFFF).
# From http://boodebr.org/main/python/all-about-python-and-unicode#UNI_XML
RE_XML_ILLEGAL = (u'([\u0000-\u0008\u000b-\u000c\u000e-\u001f\ufffe-\uffff])' +
                  u'|' +
                  u'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])'
                  %
                  (unichr(0xd800), unichr(0xdbff), unichr(0xdc00),
                   unichr(0xdfff), unichr(0xd800), unichr(0xdbff),
                   unichr(0xdc00), unichr(0xdfff), unichr(0xd800),
                   unichr(0xdbff), unichr(0xdc00), unichr(0xdfff)))

XML_ILLEGAL = re.compile(RE_XML_ILLEGAL)
133
|
|
|
|
134
|
|
|
|
135
|
|
|
class _Network(object):
    """
    A music social network website such as Last.fm or
    one with a Last.fm-compatible API.

    Not intended to be instantiated directly; use a preconfigured
    subclass or a get_*_network(...) helper.
    """
140
|
|
|
|
141
|
|
|
    def __init__(
            self, name, homepage, ws_server, api_key, api_secret, session_key,
            submission_server, username, password_hash, domain_names, urls):
        """
        name: the name of the network
        homepage: the homepage URL
        ws_server: the URL of the webservices server
        api_key: a provided API_KEY
        api_secret: a provided API_SECRET
        session_key: a generated session_key or None
        submission_server: the URL of the server to which tracks are
            submitted (scrobbled)
        username: a username of a valid user
        password_hash: the output of pylast.md5(password) where password is
            the user's password
        domain_names: a dict mapping each DOMAIN_* value to a string domain
            name
        urls: a dict mapping types to URLs

        if username and password_hash were provided and not session_key,
        session_key will be generated automatically when needed.

        Either a valid session_key or a combination of username and
        password_hash must be present for scrobbling.

        You should use a preconfigured network object through a
        get_*_network(...) method instead of creating an object
        of this class, unless you know what you're doing.
        """

        self.name = name
        self.homepage = homepage
        self.ws_server = ws_server
        self.api_key = api_key
        self.api_secret = api_secret
        self.session_key = session_key
        self.submission_server = submission_server
        self.username = username
        self.password_hash = password_hash
        self.domain_names = domain_names
        self.urls = urls

        # Per-network request state: caching, proxying and rate limiting
        # all start disabled until turned on via the public methods.
        self.cache_backend = None
        self.proxy_enabled = False
        self.proxy = None
        self.last_call_time = 0
        self.limit_rate = False

        # Generate a session_key if necessary
        if ((self.api_key and self.api_secret) and not self.session_key and
                (self.username and self.password_hash)):
            sk_gen = SessionKeyGenerator(self)
            self.session_key = sk_gen.get_session_key(
                self.username, self.password_hash)
195
|
|
|
|
196
|
|
|
def __str__(self): |
197
|
|
|
return "%s Network" % self.name |
198
|
|
|
|
199
|
|
|
def get_artist(self, artist_name): |
200
|
|
|
""" |
201
|
|
|
Return an Artist object |
202
|
|
|
""" |
203
|
|
|
|
204
|
|
|
return Artist(artist_name, self) |
205
|
|
|
|
206
|
|
|
def get_track(self, artist, title): |
207
|
|
|
""" |
208
|
|
|
Return a Track object |
209
|
|
|
""" |
210
|
|
|
|
211
|
|
|
return Track(artist, title, self) |
212
|
|
|
|
213
|
|
|
def get_album(self, artist, title): |
214
|
|
|
""" |
215
|
|
|
Return an Album object |
216
|
|
|
""" |
217
|
|
|
|
218
|
|
|
return Album(artist, title, self) |
219
|
|
|
|
220
|
|
|
def get_authenticated_user(self): |
221
|
|
|
""" |
222
|
|
|
Returns the authenticated user |
223
|
|
|
""" |
224
|
|
|
|
225
|
|
|
return AuthenticatedUser(self) |
226
|
|
|
|
227
|
|
|
def get_country(self, country_name): |
228
|
|
|
""" |
229
|
|
|
Returns a country object |
230
|
|
|
""" |
231
|
|
|
|
232
|
|
|
return Country(country_name, self) |
233
|
|
|
|
234
|
|
|
def get_metro(self, metro_name, country_name): |
235
|
|
|
""" |
236
|
|
|
Returns a metro object |
237
|
|
|
""" |
238
|
|
|
|
239
|
|
|
return Metro(metro_name, country_name, self) |
240
|
|
|
|
241
|
|
|
def get_group(self, name): |
242
|
|
|
""" |
243
|
|
|
Returns a Group object |
244
|
|
|
""" |
245
|
|
|
|
246
|
|
|
return Group(name, self) |
247
|
|
|
|
248
|
|
|
def get_user(self, username): |
249
|
|
|
""" |
250
|
|
|
Returns a user object |
251
|
|
|
""" |
252
|
|
|
|
253
|
|
|
return User(username, self) |
254
|
|
|
|
255
|
|
|
def get_tag(self, name): |
256
|
|
|
""" |
257
|
|
|
Returns a tag object |
258
|
|
|
""" |
259
|
|
|
|
260
|
|
|
return Tag(name, self) |
261
|
|
|
|
262
|
|
|
def get_scrobbler(self, client_id, client_version): |
263
|
|
|
""" |
264
|
|
|
Returns a Scrobbler object used for submitting tracks to the server |
265
|
|
|
|
266
|
|
|
Quote from http://www.last.fm/api/submissions: |
267
|
|
|
======== |
268
|
|
|
Client identifiers are used to provide a centrally managed database |
269
|
|
|
of the client versions, allowing clients to be banned if they are |
270
|
|
|
found to be behaving undesirably. The client ID is associated with |
271
|
|
|
a version number on the server, however these are only incremented |
272
|
|
|
if a client is banned and do not have to reflect the version of the |
273
|
|
|
actual client application. |
274
|
|
|
|
275
|
|
|
During development, clients which have not been allocated an |
276
|
|
|
identifier should use the identifier tst, with a version number of |
277
|
|
|
1.0. Do not distribute code or client implementations which use |
278
|
|
|
this test identifier. Do not use the identifiers used by other |
279
|
|
|
clients. |
280
|
|
|
========= |
281
|
|
|
|
282
|
|
|
To obtain a new client identifier please contact: |
283
|
|
|
* Last.fm: [email protected] |
284
|
|
|
* # TODO: list others |
285
|
|
|
|
286
|
|
|
...and provide us with the name of your client and its homepage |
287
|
|
|
address. |
288
|
|
|
""" |
289
|
|
|
|
290
|
|
|
_deprecation_warning( |
291
|
|
|
"Use _Network.scrobble(...), _Network.scrobble_many(...)," |
292
|
|
|
" and Network.update_now_playing(...) instead") |
293
|
|
|
|
294
|
|
|
return Scrobbler(self, client_id, client_version) |
295
|
|
|
|
296
|
|
|
def _get_language_domain(self, domain_language): |
297
|
|
|
""" |
298
|
|
|
Returns the mapped domain name of the network to a DOMAIN_* value |
299
|
|
|
""" |
300
|
|
|
|
301
|
|
|
if domain_language in self.domain_names: |
302
|
|
|
return self.domain_names[domain_language] |
303
|
|
|
|
304
|
|
|
def _get_url(self, domain, url_type): |
305
|
|
|
return "http://%s/%s" % ( |
306
|
|
|
self._get_language_domain(domain), self.urls[url_type]) |
307
|
|
|
|
308
|
|
|
def _get_ws_auth(self): |
309
|
|
|
""" |
310
|
|
|
Returns an (API_KEY, API_SECRET, SESSION_KEY) tuple. |
311
|
|
|
""" |
312
|
|
|
return (self.api_key, self.api_secret, self.session_key) |
313
|
|
|
|
314
|
|
|
def _delay_call(self): |
315
|
|
|
""" |
316
|
|
|
Makes sure that web service calls are at least 0.2 seconds apart. |
317
|
|
|
""" |
318
|
|
|
|
319
|
|
|
# Delay time in seconds from section 4.4 of http://www.last.fm/api/tos |
320
|
|
|
DELAY_TIME = 0.2 |
321
|
|
|
now = time.time() |
322
|
|
|
|
323
|
|
|
time_since_last = now - self.last_call_time |
324
|
|
|
|
325
|
|
|
if time_since_last < DELAY_TIME: |
326
|
|
|
time.sleep(DELAY_TIME - time_since_last) |
327
|
|
|
|
328
|
|
|
self.last_call_time = now |
329
|
|
|
|
330
|
|
|
def create_new_playlist(self, title, description): |
331
|
|
|
""" |
332
|
|
|
Creates a playlist for the authenticated user and returns it |
333
|
|
|
title: The title of the new playlist. |
334
|
|
|
description: The description of the new playlist. |
335
|
|
|
""" |
336
|
|
|
|
337
|
|
|
params = {} |
338
|
|
|
params['title'] = title |
339
|
|
|
params['description'] = description |
340
|
|
|
|
341
|
|
|
doc = _Request(self, 'playlist.create', params).execute(False) |
342
|
|
|
|
343
|
|
|
e_id = doc.getElementsByTagName("id")[0].firstChild.data |
344
|
|
|
user = doc.getElementsByTagName('playlists')[0].getAttribute('user') |
345
|
|
|
|
346
|
|
|
return Playlist(user, e_id, self) |
347
|
|
|
|
348
|
|
|
def get_top_artists(self, limit=None, cacheable=True): |
349
|
|
|
"""Returns the most played artists as a sequence of TopItem objects.""" |
350
|
|
|
|
351
|
|
|
params = {} |
352
|
|
|
if limit: |
353
|
|
|
params["limit"] = limit |
354
|
|
|
|
355
|
|
|
doc = _Request(self, "chart.getTopArtists", params).execute(cacheable) |
356
|
|
|
|
357
|
|
|
return _extract_top_artists(doc, self) |
358
|
|
|
|
359
|
|
View Code Duplication |
def get_top_tracks(self, limit=None, cacheable=True): |
|
|
|
|
360
|
|
|
"""Returns the most played tracks as a sequence of TopItem objects.""" |
361
|
|
|
|
362
|
|
|
params = {} |
363
|
|
|
if limit: |
364
|
|
|
params["limit"] = limit |
365
|
|
|
|
366
|
|
|
doc = _Request(self, "chart.getTopTracks", params).execute(cacheable) |
367
|
|
|
|
368
|
|
|
seq = [] |
369
|
|
|
for node in doc.getElementsByTagName("track"): |
370
|
|
|
title = _extract(node, "name") |
371
|
|
|
artist = _extract(node, "name", 1) |
372
|
|
|
track = Track(artist, title, self) |
373
|
|
|
weight = _number(_extract(node, "playcount")) |
374
|
|
|
seq.append(TopItem(track, weight)) |
375
|
|
|
|
376
|
|
|
return seq |
377
|
|
|
|
378
|
|
|
def get_top_tags(self, limit=None, cacheable=True): |
379
|
|
|
"""Returns the most used tags as a sequence of TopItem objects.""" |
380
|
|
|
|
381
|
|
|
# Last.fm has no "limit" parameter for tag.getTopTags |
382
|
|
|
# so we need to get all (250) and then limit locally |
383
|
|
|
doc = _Request(self, "tag.getTopTags").execute(cacheable) |
384
|
|
|
|
385
|
|
|
seq = [] |
386
|
|
|
for node in doc.getElementsByTagName("tag"): |
387
|
|
|
if limit and len(seq) >= limit: |
388
|
|
|
break |
389
|
|
|
tag = Tag(_extract(node, "name"), self) |
390
|
|
|
weight = _number(_extract(node, "count")) |
391
|
|
|
seq.append(TopItem(tag, weight)) |
392
|
|
|
|
393
|
|
|
return seq |
394
|
|
|
|
395
|
|
|
def get_geo_events( |
396
|
|
|
self, longitude=None, latitude=None, location=None, distance=None, |
397
|
|
|
tag=None, festivalsonly=None, limit=None, cacheable=True): |
398
|
|
|
""" |
399
|
|
|
Returns all events in a specific location by country or city name. |
400
|
|
|
Parameters: |
401
|
|
|
longitude (Optional) : Specifies a longitude value to retrieve events |
402
|
|
|
for (service returns nearby events by default) |
403
|
|
|
latitude (Optional) : Specifies a latitude value to retrieve events for |
404
|
|
|
(service returns nearby events by default) |
405
|
|
|
location (Optional) : Specifies a location to retrieve events for |
406
|
|
|
(service returns nearby events by default) |
407
|
|
|
distance (Optional) : Find events within a specified radius |
408
|
|
|
(in kilometres) |
409
|
|
|
tag (Optional) : Specifies a tag to filter by. |
410
|
|
|
festivalsonly[0|1] (Optional) : Whether only festivals should be |
411
|
|
|
returned, or all events. |
412
|
|
|
limit (Optional) : The number of results to fetch per page. |
413
|
|
|
Defaults to 10. |
414
|
|
|
""" |
415
|
|
|
|
416
|
|
|
params = {} |
417
|
|
|
|
418
|
|
|
if longitude: |
419
|
|
|
params["long"] = longitude |
420
|
|
|
if latitude: |
421
|
|
|
params["lat"] = latitude |
422
|
|
|
if location: |
423
|
|
|
params["location"] = location |
424
|
|
|
if limit: |
425
|
|
|
params["limit"] = limit |
426
|
|
|
if distance: |
427
|
|
|
params["distance"] = distance |
428
|
|
|
if tag: |
429
|
|
|
params["tag"] = tag |
430
|
|
|
if festivalsonly: |
431
|
|
|
params["festivalsonly"] = 1 |
432
|
|
|
elif not festivalsonly: |
433
|
|
|
params["festivalsonly"] = 0 |
434
|
|
|
|
435
|
|
|
doc = _Request(self, "geo.getEvents", params).execute(cacheable) |
436
|
|
|
|
437
|
|
|
return _extract_events_from_doc(doc, self) |
438
|
|
|
|
439
|
|
|
def get_metro_weekly_chart_dates(self, cacheable=True): |
440
|
|
|
""" |
441
|
|
|
Returns a list of From and To tuples for the available metro charts. |
442
|
|
|
""" |
443
|
|
|
|
444
|
|
|
doc = _Request(self, "geo.getMetroWeeklyChartlist").execute(cacheable) |
445
|
|
|
|
446
|
|
|
seq = [] |
447
|
|
|
for node in doc.getElementsByTagName("chart"): |
448
|
|
|
seq.append((node.getAttribute("from"), node.getAttribute("to"))) |
449
|
|
|
|
450
|
|
|
return seq |
451
|
|
|
|
452
|
|
|
def get_metros(self, country=None, cacheable=True): |
453
|
|
|
""" |
454
|
|
|
Get a list of valid countries and metros for use in the other |
455
|
|
|
webservices. |
456
|
|
|
Parameters: |
457
|
|
|
country (Optional) : Optionally restrict the results to those Metros |
458
|
|
|
from a particular country, as defined by the ISO 3166-1 country |
459
|
|
|
names standard. |
460
|
|
|
""" |
461
|
|
|
params = {} |
462
|
|
|
|
463
|
|
|
if country: |
464
|
|
|
params["country"] = country |
465
|
|
|
|
466
|
|
|
doc = _Request(self, "geo.getMetros", params).execute(cacheable) |
467
|
|
|
|
468
|
|
|
metros = doc.getElementsByTagName("metro") |
469
|
|
|
seq = [] |
470
|
|
|
|
471
|
|
|
for metro in metros: |
472
|
|
|
name = _extract(metro, "name") |
473
|
|
|
country = _extract(metro, "country") |
474
|
|
|
|
475
|
|
|
seq.append(Metro(name, country, self)) |
476
|
|
|
|
477
|
|
|
return seq |
478
|
|
|
|
479
|
|
|
def get_geo_top_artists(self, country, limit=None, cacheable=True): |
480
|
|
|
"""Get the most popular artists on Last.fm by country. |
481
|
|
|
Parameters: |
482
|
|
|
country (Required) : A country name, as defined by the ISO 3166-1 |
483
|
|
|
country names standard. |
484
|
|
|
limit (Optional) : The number of results to fetch per page. |
485
|
|
|
Defaults to 50. |
486
|
|
|
""" |
487
|
|
|
params = {"country": country} |
488
|
|
|
|
489
|
|
|
if limit: |
490
|
|
|
params["limit"] = limit |
491
|
|
|
|
492
|
|
|
doc = _Request(self, "geo.getTopArtists", params).execute(cacheable) |
493
|
|
|
|
494
|
|
|
return _extract_top_artists(doc, self) |
495
|
|
|
|
496
|
|
View Code Duplication |
def get_geo_top_tracks( |
|
|
|
|
497
|
|
|
self, country, location=None, limit=None, cacheable=True): |
498
|
|
|
"""Get the most popular tracks on Last.fm last week by country. |
499
|
|
|
Parameters: |
500
|
|
|
country (Required) : A country name, as defined by the ISO 3166-1 |
501
|
|
|
country names standard |
502
|
|
|
location (Optional) : A metro name, to fetch the charts for |
503
|
|
|
(must be within the country specified) |
504
|
|
|
limit (Optional) : The number of results to fetch per page. |
505
|
|
|
Defaults to 50. |
506
|
|
|
""" |
507
|
|
|
params = {"country": country} |
508
|
|
|
|
509
|
|
|
if location: |
510
|
|
|
params["location"] = location |
511
|
|
|
if limit: |
512
|
|
|
params["limit"] = limit |
513
|
|
|
|
514
|
|
|
doc = _Request(self, "geo.getTopTracks", params).execute(cacheable) |
515
|
|
|
|
516
|
|
|
tracks = doc.getElementsByTagName("track") |
517
|
|
|
seq = [] |
518
|
|
|
|
519
|
|
|
for track in tracks: |
520
|
|
|
title = _extract(track, "name") |
521
|
|
|
artist = _extract(track, "name", 1) |
522
|
|
|
listeners = _extract(track, "listeners") |
523
|
|
|
|
524
|
|
|
seq.append(TopItem(Track(artist, title, self), listeners)) |
525
|
|
|
|
526
|
|
|
return seq |
527
|
|
|
|
528
|
|
|
def enable_proxy(self, host, port): |
529
|
|
|
"""Enable a default web proxy""" |
530
|
|
|
|
531
|
|
|
self.proxy = [host, _number(port)] |
532
|
|
|
self.proxy_enabled = True |
533
|
|
|
|
534
|
|
|
def disable_proxy(self): |
535
|
|
|
"""Disable using the web proxy""" |
536
|
|
|
|
537
|
|
|
self.proxy_enabled = False |
538
|
|
|
|
539
|
|
|
def is_proxy_enabled(self): |
540
|
|
|
"""Returns True if a web proxy is enabled.""" |
541
|
|
|
|
542
|
|
|
return self.proxy_enabled |
543
|
|
|
|
544
|
|
|
def _get_proxy(self): |
545
|
|
|
"""Returns proxy details.""" |
546
|
|
|
|
547
|
|
|
return self.proxy |
548
|
|
|
|
549
|
|
|
def enable_rate_limit(self): |
550
|
|
|
"""Enables rate limiting for this network""" |
551
|
|
|
self.limit_rate = True |
552
|
|
|
|
553
|
|
|
def disable_rate_limit(self): |
554
|
|
|
"""Disables rate limiting for this network""" |
555
|
|
|
self.limit_rate = False |
556
|
|
|
|
557
|
|
|
def is_rate_limited(self): |
558
|
|
|
"""Return True if web service calls are rate limited""" |
559
|
|
|
return self.limit_rate |
560
|
|
|
|
561
|
|
|
def enable_caching(self, file_path=None): |
562
|
|
|
"""Enables caching request-wide for all cacheable calls. |
563
|
|
|
|
564
|
|
|
* file_path: A file path for the backend storage file. If |
565
|
|
|
None set, a temp file would probably be created, according the backend. |
566
|
|
|
""" |
567
|
|
|
|
568
|
|
|
if not file_path: |
569
|
|
|
file_path = tempfile.mktemp(prefix="pylast_tmp_") |
570
|
|
|
|
571
|
|
|
self.cache_backend = _ShelfCacheBackend(file_path) |
572
|
|
|
|
573
|
|
|
def disable_caching(self): |
574
|
|
|
"""Disables all caching features.""" |
575
|
|
|
|
576
|
|
|
self.cache_backend = None |
577
|
|
|
|
578
|
|
|
def is_caching_enabled(self): |
579
|
|
|
"""Returns True if caching is enabled.""" |
580
|
|
|
|
581
|
|
|
return not (self.cache_backend is None) |
582
|
|
|
|
583
|
|
|
def _get_cache_backend(self): |
584
|
|
|
|
585
|
|
|
return self.cache_backend |
586
|
|
|
|
587
|
|
|
def search_for_album(self, album_name): |
588
|
|
|
"""Searches for an album by its name. Returns a AlbumSearch object. |
589
|
|
|
Use get_next_page() to retrieve sequences of results.""" |
590
|
|
|
|
591
|
|
|
return AlbumSearch(album_name, self) |
592
|
|
|
|
593
|
|
|
def search_for_artist(self, artist_name): |
594
|
|
|
"""Searches of an artist by its name. Returns a ArtistSearch object. |
595
|
|
|
Use get_next_page() to retrieve sequences of results.""" |
596
|
|
|
|
597
|
|
|
return ArtistSearch(artist_name, self) |
598
|
|
|
|
599
|
|
|
def search_for_tag(self, tag_name): |
600
|
|
|
"""Searches of a tag by its name. Returns a TagSearch object. |
601
|
|
|
Use get_next_page() to retrieve sequences of results.""" |
602
|
|
|
|
603
|
|
|
return TagSearch(tag_name, self) |
604
|
|
|
|
605
|
|
|
def search_for_track(self, artist_name, track_name): |
606
|
|
|
"""Searches of a track by its name and its artist. Set artist to an |
607
|
|
|
empty string if not available. |
608
|
|
|
Returns a TrackSearch object. |
609
|
|
|
Use get_next_page() to retrieve sequences of results.""" |
610
|
|
|
|
611
|
|
|
return TrackSearch(artist_name, track_name, self) |
612
|
|
|
|
613
|
|
|
def search_for_venue(self, venue_name, country_name): |
614
|
|
|
"""Searches of a venue by its name and its country. Set country_name to |
615
|
|
|
an empty string if not available. |
616
|
|
|
Returns a VenueSearch object. |
617
|
|
|
Use get_next_page() to retrieve sequences of results.""" |
618
|
|
|
|
619
|
|
|
return VenueSearch(venue_name, country_name, self) |
620
|
|
|
|
621
|
|
|
def get_track_by_mbid(self, mbid): |
622
|
|
|
"""Looks up a track by its MusicBrainz ID""" |
623
|
|
|
|
624
|
|
|
params = {"mbid": mbid} |
625
|
|
|
|
626
|
|
|
doc = _Request(self, "track.getInfo", params).execute(True) |
627
|
|
|
|
628
|
|
|
return Track(_extract(doc, "name", 1), _extract(doc, "name"), self) |
629
|
|
|
|
630
|
|
|
def get_artist_by_mbid(self, mbid): |
631
|
|
|
"""Loooks up an artist by its MusicBrainz ID""" |
632
|
|
|
|
633
|
|
|
params = {"mbid": mbid} |
634
|
|
|
|
635
|
|
|
doc = _Request(self, "artist.getInfo", params).execute(True) |
636
|
|
|
|
637
|
|
|
return Artist(_extract(doc, "name"), self) |
638
|
|
|
|
639
|
|
|
def get_album_by_mbid(self, mbid): |
640
|
|
|
"""Looks up an album by its MusicBrainz ID""" |
641
|
|
|
|
642
|
|
|
params = {"mbid": mbid} |
643
|
|
|
|
644
|
|
|
doc = _Request(self, "album.getInfo", params).execute(True) |
645
|
|
|
|
646
|
|
|
return Album(_extract(doc, "artist"), _extract(doc, "name"), self) |
647
|
|
|
|
648
|
|
|
def update_now_playing( |
649
|
|
|
self, artist, title, album=None, album_artist=None, |
650
|
|
|
duration=None, track_number=None, mbid=None, context=None): |
651
|
|
|
""" |
652
|
|
|
Used to notify Last.fm that a user has started listening to a track. |
653
|
|
|
|
654
|
|
|
Parameters: |
655
|
|
|
artist (Required) : The artist name |
656
|
|
|
title (Required) : The track title |
657
|
|
|
album (Optional) : The album name. |
658
|
|
|
album_artist (Optional) : The album artist - if this differs |
659
|
|
|
from the track artist. |
660
|
|
|
duration (Optional) : The length of the track in seconds. |
661
|
|
|
track_number (Optional) : The track number of the track on the |
662
|
|
|
album. |
663
|
|
|
mbid (Optional) : The MusicBrainz Track ID. |
664
|
|
|
context (Optional) : Sub-client version |
665
|
|
|
(not public, only enabled for certain API keys) |
666
|
|
|
""" |
667
|
|
|
|
668
|
|
|
params = {"track": title, "artist": artist} |
669
|
|
|
|
670
|
|
|
if album: |
671
|
|
|
params["album"] = album |
672
|
|
|
if album_artist: |
673
|
|
|
params["albumArtist"] = album_artist |
674
|
|
|
if context: |
675
|
|
|
params["context"] = context |
676
|
|
|
if track_number: |
677
|
|
|
params["trackNumber"] = track_number |
678
|
|
|
if mbid: |
679
|
|
|
params["mbid"] = mbid |
680
|
|
|
if duration: |
681
|
|
|
params["duration"] = duration |
682
|
|
|
|
683
|
|
|
_Request(self, "track.updateNowPlaying", params).execute() |
684
|
|
|
|
685
|
|
|
def scrobble( |
686
|
|
|
self, artist, title, timestamp, album=None, album_artist=None, |
687
|
|
|
track_number=None, duration=None, stream_id=None, context=None, |
688
|
|
|
mbid=None): |
689
|
|
|
|
690
|
|
|
"""Used to add a track-play to a user's profile. |
691
|
|
|
|
692
|
|
|
Parameters: |
693
|
|
|
artist (Required) : The artist name. |
694
|
|
|
title (Required) : The track name. |
695
|
|
|
timestamp (Required) : The time the track started playing, in UNIX |
696
|
|
|
timestamp format (integer number of seconds since 00:00:00, |
697
|
|
|
January 1st 1970 UTC). This must be in the UTC time zone. |
698
|
|
|
album (Optional) : The album name. |
699
|
|
|
album_artist (Optional) : The album artist - if this differs from |
700
|
|
|
the track artist. |
701
|
|
|
context (Optional) : Sub-client version (not public, only enabled |
702
|
|
|
for certain API keys) |
703
|
|
|
stream_id (Optional) : The stream id for this track received from |
704
|
|
|
the radio.getPlaylist service. |
705
|
|
|
track_number (Optional) : The track number of the track on the |
706
|
|
|
album. |
707
|
|
|
mbid (Optional) : The MusicBrainz Track ID. |
708
|
|
|
duration (Optional) : The length of the track in seconds. |
709
|
|
|
""" |
710
|
|
|
|
711
|
|
|
return self.scrobble_many(({ |
712
|
|
|
"artist": artist, "title": title, "timestamp": timestamp, |
713
|
|
|
"album": album, "album_artist": album_artist, |
714
|
|
|
"track_number": track_number, "duration": duration, |
715
|
|
|
"stream_id": stream_id, "context": context, "mbid": mbid},)) |
716
|
|
|
|
717
|
|
|
def scrobble_many(self, tracks): |
718
|
|
|
""" |
719
|
|
|
Used to scrobble a batch of tracks at once. The parameter tracks is a |
720
|
|
|
sequence of dicts per track containing the keyword arguments as if |
721
|
|
|
passed to the scrobble() method. |
722
|
|
|
""" |
723
|
|
|
|
724
|
|
|
tracks_to_scrobble = tracks[:50] |
725
|
|
|
if len(tracks) > 50: |
726
|
|
|
remaining_tracks = tracks[50:] |
727
|
|
|
else: |
728
|
|
|
remaining_tracks = None |
729
|
|
|
|
730
|
|
|
params = {} |
731
|
|
|
for i in range(len(tracks_to_scrobble)): |
732
|
|
|
|
733
|
|
|
params["artist[%d]" % i] = tracks_to_scrobble[i]["artist"] |
734
|
|
|
params["track[%d]" % i] = tracks_to_scrobble[i]["title"] |
735
|
|
|
|
736
|
|
|
additional_args = ( |
737
|
|
|
"timestamp", "album", "album_artist", "context", |
738
|
|
|
"stream_id", "track_number", "mbid", "duration") |
739
|
|
|
args_map_to = { # so friggin lazy |
740
|
|
|
"album_artist": "albumArtist", |
741
|
|
|
"track_number": "trackNumber", |
742
|
|
|
"stream_id": "streamID"} |
743
|
|
|
|
744
|
|
|
for arg in additional_args: |
745
|
|
|
|
746
|
|
|
if arg in tracks_to_scrobble[i] and tracks_to_scrobble[i][arg]: |
747
|
|
|
if arg in args_map_to: |
748
|
|
|
maps_to = args_map_to[arg] |
749
|
|
|
else: |
750
|
|
|
maps_to = arg |
751
|
|
|
|
752
|
|
|
params[ |
753
|
|
|
"%s[%d]" % (maps_to, i)] = tracks_to_scrobble[i][arg] |
754
|
|
|
|
755
|
|
|
_Request(self, "track.scrobble", params).execute() |
756
|
|
|
|
757
|
|
|
if remaining_tracks: |
758
|
|
|
self.scrobble_many(remaining_tracks) |
759
|
|
|
|
760
|
|
|
def get_play_links(self, link_type, things, cacheable=True): |
761
|
|
|
method = link_type + ".getPlaylinks" |
762
|
|
|
params = {} |
763
|
|
|
|
764
|
|
|
for i, thing in enumerate(things): |
765
|
|
|
if link_type == "artist": |
766
|
|
|
params['artist[' + str(i) + ']'] = thing |
767
|
|
|
elif link_type == "album": |
768
|
|
|
params['artist[' + str(i) + ']'] = thing.artist |
769
|
|
|
params['album[' + str(i) + ']'] = thing.title |
770
|
|
|
elif link_type == "track": |
771
|
|
|
params['artist[' + str(i) + ']'] = thing.artist |
772
|
|
|
params['track[' + str(i) + ']'] = thing.title |
773
|
|
|
|
774
|
|
|
doc = _Request(self, method, params).execute(cacheable) |
775
|
|
|
|
776
|
|
|
seq = [] |
777
|
|
|
|
778
|
|
|
for node in doc.getElementsByTagName("externalids"): |
779
|
|
|
spotify = _extract(node, "spotify") |
780
|
|
|
seq.append(spotify) |
781
|
|
|
|
782
|
|
|
return seq |
783
|
|
|
|
784
|
|
|
def get_artist_play_links(self, artists, cacheable=True): |
785
|
|
|
return self.get_play_links("artist", artists, cacheable) |
786
|
|
|
|
787
|
|
|
def get_album_play_links(self, albums, cacheable=True): |
788
|
|
|
return self.get_play_links("album", albums, cacheable) |
789
|
|
|
|
790
|
|
|
def get_track_play_links(self, tracks, cacheable=True): |
791
|
|
|
return self.get_play_links("track", tracks, cacheable) |
792
|
|
|
|
793
|
|
|
|
794
|
|
View Code Duplication |
class LastFMNetwork(_Network):

    """A Last.fm network object

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key or a combination of username and password_hash
    must be present for scrobbling.

    Most read-only webservices only require an api_key and an api_secret, see
    about obtaining them from:
    http://www.last.fm/api/account
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash=""):
        # Pre-configured Last.fm endpoints, localized domain names and
        # URL templates; see _Network.__init__ for parameter details.
        _Network.__init__(
            self,
            name="Last.fm",
            homepage="http://last.fm",
            ws_server=("ws.audioscrobbler.com", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://post.audioscrobbler.com:80/",
            username=username,
            password_hash=password_hash,
            domain_names={
                DOMAIN_ENGLISH: 'www.last.fm',
                DOMAIN_GERMAN: 'www.lastfm.de',
                DOMAIN_SPANISH: 'www.lastfm.es',
                DOMAIN_FRENCH: 'www.lastfm.fr',
                DOMAIN_ITALIAN: 'www.lastfm.it',
                DOMAIN_POLISH: 'www.lastfm.pl',
                DOMAIN_PORTUGUESE: 'www.lastfm.com.br',
                DOMAIN_SWEDISH: 'www.lastfm.se',
                DOMAIN_TURKISH: 'www.lastfm.com.tr',
                DOMAIN_RUSSIAN: 'www.lastfm.ru',
                DOMAIN_JAPANESE: 'www.lastfm.jp',
                DOMAIN_CHINESE: 'cn.last.fm',
            },
            urls={
                "album": "music/%(artist)s/%(album)s",
                "artist": "music/%(artist)s",
                "event": "event/%(id)s",
                "country": "place/%(country_name)s",
                "playlist": "user/%(user)s/library/playlists/%(appendix)s",
                "tag": "tag/%(name)s",
                "track": "music/%(artist)s/_/%(title)s",
                "group": "group/%(name)s",
                "user": "user/%(name)s",
            }
        )

    def __repr__(self):
        # NOTE: repr includes credentials (api_secret, password_hash);
        # intended for debugging only.
        return "pylast.LastFMNetwork(%s)" % (", ".join(
            ("'%s'" % self.api_key,
             "'%s'" % self.api_secret,
             "'%s'" % self.session_key,
             "'%s'" % self.username,
             "'%s'" % self.password_hash)))
864
|
|
|
|
865
|
|
|
|
866
|
|
|
def get_lastfm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Returns a preconfigured _Network object for Last.fm

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key or a combination of username and password_hash
    must be present for scrobbling.

    Most read-only webservices only require an api_key and an api_secret, see
    about obtaining them from:
    http://www.last.fm/api/account
    """
    # Deprecated shim: kept only for backwards compatibility.
    _deprecation_warning("Create a LastFMNetwork object instead")

    return LastFMNetwork(
        api_key=api_key,
        api_secret=api_secret,
        session_key=session_key,
        username=username,
        password_hash=password_hash)
894
|
|
|
|
895
|
|
|
|
896
|
|
View Code Duplication |
class LibreFMNetwork(_Network):

    """
    A preconfigured _Network object for Libre.fm

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash=""):
        # Libre.fm serves every language from the same hostname.
        languages = [
            DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH, DOMAIN_FRENCH,
            DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE, DOMAIN_SWEDISH,
            DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE, DOMAIN_CHINESE,
        ]
        domains = dict((language, "libre.fm") for language in languages)

        # Relative URL templates, keyed by object kind.
        page_urls = {
            "album": "artist/%(artist)s/album/%(album)s",
            "artist": "artist/%(artist)s",
            "event": "event/%(id)s",
            "country": "place/%(country_name)s",
            "playlist": "user/%(user)s/library/playlists/%(appendix)s",
            "tag": "tag/%(name)s",
            "track": "music/%(artist)s/_/%(title)s",
            "group": "group/%(name)s",
            "user": "user/%(name)s",
        }

        _Network.__init__(
            self,
            name="Libre.fm",
            homepage="http://libre.fm",
            ws_server=("libre.fm", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://turtle.libre.fm:80/",
            username=username,
            password_hash=password_hash,
            domain_names=domains,
            urls=page_urls,
        )

    def __repr__(self):
        credentials = (self.api_key, self.api_secret, self.session_key,
                       self.username, self.password_hash)
        return "pylast.LibreFMNetwork(%s)" % ", ".join(
            "'%s'" % value for value in credentials)
960
|
|
|
|
961
|
|
|
|
962
|
|
|
def get_librefm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Returns a preconfigured _Network object for Libre.fm

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """
    # The message previously began with a redundant "DeprecationWarning: "
    # prefix; the warnings machinery already reports the category, and the
    # get_lastfm_network counterpart carries no such prefix.
    _deprecation_warning("Create a LibreFMNetwork object instead")

    return LibreFMNetwork(
        api_key, api_secret, session_key, username, password_hash)
984
|
|
|
|
985
|
|
|
|
986
|
|
|
class _ShelfCacheBackend(object): |
987
|
|
|
"""Used as a backend for caching cacheable requests.""" |
988
|
|
|
def __init__(self, file_path=None): |
989
|
|
|
self.shelf = shelve.open(file_path) |
990
|
|
|
|
991
|
|
|
def __iter__(self): |
992
|
|
|
return iter(self.shelf.keys()) |
993
|
|
|
|
994
|
|
|
def get_xml(self, key): |
995
|
|
|
return self.shelf[key] |
996
|
|
|
|
997
|
|
|
def set_xml(self, key, xml_string): |
998
|
|
|
self.shelf[key] = xml_string |
999
|
|
|
|
1000
|
|
|
|
1001
|
|
|
class _Request(object):
    """Representing an abstract web service operation.

    Collects parameters, signs them when a session key is available, and
    downloads/caches the XML response from the network's web-service server.
    """

    def __init__(self, network, method_name, params=None):
        # `params` used to default to a mutable `{}` shared across all
        # calls; default to None and build a fresh dict instead.
        if params is None:
            params = {}

        self.network = network
        self.params = {}

        # Normalise all supplied parameter values to text.
        for key in params:
            self.params[key] = _unicode(params[key])

        (self.api_key, self.api_secret, self.session_key) = \
            network._get_ws_auth()

        self.params["api_key"] = self.api_key
        self.params["method"] = method_name

        if network.is_caching_enabled():
            self.cache = network._get_cache_backend()

        # A request is signed automatically only when a session key exists;
        # otherwise callers invoke sign_it() themselves when needed.
        if self.session_key:
            self.params["sk"] = self.session_key
            self.sign_it()

    def sign_it(self):
        """Sign this request."""
        if "api_sig" not in self.params.keys():
            self.params['api_sig'] = self._get_signature()

    def _get_signature(self):
        """
        Returns a 32-character hexadecimal md5 hash of the signature string.
        """
        # Signature string: all key/value pairs in sorted key order,
        # followed by the shared API secret.
        keys = sorted(self.params.keys())
        signature = "".join(name + self.params[name] for name in keys)
        return md5(signature + self.api_secret)

    def _get_cache_key(self):
        """
        The cache key is a string of concatenated sorted names and values.
        """
        keys = sorted(self.params.keys())

        cache_key = str()

        for key in keys:
            # Exclude credentials so identical queries share a cache entry
            # regardless of who issued them.
            if key != "api_sig" and key != "api_key" and key != "sk":
                cache_key += key + self.params[key]

        return hashlib.sha1(cache_key.encode("utf-8")).hexdigest()

    def _get_cached_response(self):
        """Returns a file object of the cached response."""
        if not self._is_cached():
            response = self._download_response()
            self.cache.set_xml(self._get_cache_key(), response)

        return self.cache.get_xml(self._get_cache_key())

    def _is_cached(self):
        """Returns True if the request is already in cache."""
        return self._get_cache_key() in self.cache

    def _download_response(self):
        """Returns a response body string from the server.

        Raises NetworkError on connection problems and
        MalformedResponseError if the body cannot be read or parsed.
        """
        if self.network.limit_rate:
            self.network._delay_call()

        data = []
        for name in self.params.keys():
            data.append('='.join((
                name, url_quote_plus(_string(self.params[name])))))
        data = '&'.join(data)

        # Signed (write) requests go via POST; read requests via GET with
        # the parameters in the query string.
        if "api_sig" in self.params.keys():
            method = "POST"
            url_parameters = ""
        else:
            method = "GET"
            url_parameters = "?" + data

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            'Accept-Charset': 'utf-8',
            'User-Agent': "pylast" + '/' + __version__
        }

        (HOST_NAME, HOST_SUBDIR) = self.network.ws_server

        if self.network.is_proxy_enabled():
            conn = HTTPConnection(
                host=self.network._get_proxy()[0],
                port=self.network._get_proxy()[1])

            try:
                # Proxied requests need the absolute URL.
                conn.request(
                    url="http://" + HOST_NAME + HOST_SUBDIR + url_parameters,
                    method=method, body=data, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        else:
            conn = HTTPConnection(host=HOST_NAME)

            try:
                conn.request(
                    url=HOST_SUBDIR + url_parameters, body=data,
                    method=method, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        try:
            response_text = _unicode(conn.getresponse().read())
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        # Replace characters that are illegal in XML before parsing.
        response_text = XML_ILLEGAL.sub("?", response_text)

        self._check_response_for_errors(response_text)
        return response_text

    def execute(self, cacheable=False):
        """Returns the XML DOM response of the POST Request from the server"""
        if self.network.is_caching_enabled() and cacheable:
            response = self._get_cached_response()
        else:
            response = self._download_response()

        # Strip the "opensearch:" namespace prefix so element lookups work.
        return minidom.parseString(_string(response).replace(
            "opensearch:", ""))

    def _check_response_for_errors(self, response):
        """Checks the response for errors and raises one if any exists."""
        try:
            doc = minidom.parseString(_string(response).replace(
                "opensearch:", ""))
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        e = doc.getElementsByTagName('lfm')[0]

        if e.getAttribute('status') != "ok":
            e = doc.getElementsByTagName('error')[0]
            status = e.getAttribute('code')
            details = e.firstChild.data.strip()
            raise WSError(self.network, status, details)
1167
|
|
|
|
1168
|
|
|
|
1169
|
|
|
class SessionKeyGenerator(object):
    """Methods of generating a session key:
    1) Web Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. sg = SessionKeyGenerator(network)
        c. url = sg.get_web_auth_url()
        d. Ask the user to open the url and authorize you, and wait for it.
        e. session_key = sg.get_web_auth_session_key(url)
    2) Username and Password Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. username = raw_input("Please enter your username: ")
        c. password_hash = pylast.md5(raw_input("Please enter your password: ")
        d. session_key = SessionKeyGenerator(network).get_session_key(username,
            password_hash)

    A session key's lifetime is infinite, unless the user revokes the rights
    of the given API Key.

    If you create a Network object with just a API_KEY and API_SECRET and a
    username and a password_hash, a SESSION_KEY will be automatically generated
    for that network and stored in it so you don't have to do this manually,
    unless you want to.
    """

    def __init__(self, network):
        self.network = network
        self.web_auth_tokens = {}

    def _get_web_auth_token(self):
        """
        Retrieves a token from the network for web authentication.
        The token then has to be authorized from getAuthURL before creating
        session.
        """
        token_request = _Request(self.network, 'auth.getToken')

        # Requests are only auto-signed when a session key exists, so sign
        # this one explicitly.
        token_request.sign_it()

        doc = token_request.execute()
        return doc.getElementsByTagName('token')[0].firstChild.data

    def get_web_auth_url(self):
        """
        The user must open this page, and you first, then
        call get_web_auth_session_key(url) after that.
        """
        token = self._get_web_auth_token()

        url = '%(homepage)s/api/auth/?api_key=%(api)s&token=%(token)s' % {
            "homepage": self.network.homepage,
            "api": self.network.api_key,
            "token": token,
        }

        # Remember the token so the session key can be fetched later.
        self.web_auth_tokens[url] = token

        return url

    def get_web_auth_session_key(self, url):
        """
        Retrieves the session key of a web authorization process by its url.
        """
        # An unknown url yields an empty token, which raises a WSError for
        # an unauthorized token when the request is executed.
        token = self.web_auth_tokens.get(url, "")

        session_request = _Request(
            self.network, 'auth.getSession', {'token': token})
        session_request.sign_it()

        doc = session_request.execute()
        return doc.getElementsByTagName('key')[0].firstChild.data

    def get_session_key(self, username, password_hash):
        """
        Retrieve a session key with a username and a md5 hash of the user's
        password.
        """
        auth_params = {
            "username": username,
            "authToken": md5(username + password_hash),
        }
        mobile_request = _Request(
            self.network, "auth.getMobileSession", auth_params)
        mobile_request.sign_it()

        doc = mobile_request.execute()
        return _extract(doc, "key")
1270
|
|
|
|
1271
|
|
|
# Lightweight result records returned by the various web-service wrappers.
TopItem = collections.namedtuple("TopItem", "item weight")
SimilarItem = collections.namedtuple("SimilarItem", "item match")
LibraryItem = collections.namedtuple(
    "LibraryItem", "item playcount tagcount")
PlayedTrack = collections.namedtuple(
    "PlayedTrack", "track album playback_date timestamp")
LovedTrack = collections.namedtuple("LovedTrack", "track date timestamp")
ImageSizes = collections.namedtuple(
    "ImageSizes", "original large largesquare medium small extralarge")
Image = collections.namedtuple(
    "Image", "title url dateadded format owner sizes votes")
Shout = collections.namedtuple("Shout", "body author date")
1287
|
|
|
|
1288
|
|
|
|
1289
|
|
|
def _string_output(funct):
    """Decorator: pass the wrapped callable's return value through _string().

    Improvements over the original: keyword arguments are now forwarded
    (they used to be silently dropped), and functools.wraps preserves the
    wrapped function's name and docstring.
    """
    import functools  # local import: file-level import block not editable here

    @functools.wraps(funct)
    def wrapper(*args, **kwargs):
        return _string(funct(*args, **kwargs))

    return wrapper
1294
|
|
|
|
1295
|
|
|
|
1296
|
|
|
def _pad_list(given_list, desired_length, padding=None): |
1297
|
|
|
""" |
1298
|
|
|
Pads a list to be of the desired_length. |
1299
|
|
|
""" |
1300
|
|
|
|
1301
|
|
|
while len(given_list) < desired_length: |
1302
|
|
|
given_list.append(padding) |
1303
|
|
|
|
1304
|
|
|
return given_list |
1305
|
|
|
|
1306
|
|
|
|
1307
|
|
|
class _BaseObject(object): |
1308
|
|
|
"""An abstract webservices object.""" |
1309
|
|
|
|
1310
|
|
|
network = None |
1311
|
|
|
|
1312
|
|
|
def __init__(self, network, ws_prefix): |
1313
|
|
|
self.network = network |
1314
|
|
|
self.ws_prefix = ws_prefix |
1315
|
|
|
|
1316
|
|
|
def _request(self, method_name, cacheable=False, params=None): |
1317
|
|
|
if not params: |
1318
|
|
|
params = self._get_params() |
1319
|
|
|
|
1320
|
|
|
return _Request(self.network, method_name, params).execute(cacheable) |
1321
|
|
|
|
1322
|
|
|
def _get_params(self): |
1323
|
|
|
"""Returns the most common set of parameters between all objects.""" |
1324
|
|
|
|
1325
|
|
|
return {} |
1326
|
|
|
|
1327
|
|
|
def __hash__(self): |
1328
|
|
|
# Convert any ints (or whatever) into strings |
1329
|
|
|
values = map(six.text_type, self._get_params().values()) |
1330
|
|
|
|
1331
|
|
|
return hash(self.network) + hash(six.text_type(type(self)) + "".join( |
1332
|
|
|
list(self._get_params().keys()) + list(values) |
1333
|
|
|
).lower()) |
1334
|
|
|
|
1335
|
|
|
def _extract_cdata_from_request(self, method_name, tag_name, params): |
1336
|
|
|
doc = self._request(method_name, True, params) |
1337
|
|
|
|
1338
|
|
|
return doc.getElementsByTagName( |
1339
|
|
|
tag_name)[0].firstChild.wholeText.strip() |
1340
|
|
|
|
1341
|
|
|
def _get_things( |
1342
|
|
|
self, method, thing, thing_type, params=None, cacheable=True): |
1343
|
|
|
"""Returns a list of the most played thing_types by this thing.""" |
1344
|
|
|
|
1345
|
|
|
doc = self._request( |
1346
|
|
|
self.ws_prefix + "." + method, cacheable, params) |
1347
|
|
|
|
1348
|
|
|
seq = [] |
1349
|
|
|
for node in doc.getElementsByTagName(thing): |
1350
|
|
|
title = _extract(node, "name") |
1351
|
|
|
artist = _extract(node, "name", 1) |
1352
|
|
|
playcount = _number(_extract(node, "playcount")) |
1353
|
|
|
|
1354
|
|
|
seq.append(TopItem( |
1355
|
|
|
thing_type(artist, title, self.network), playcount)) |
1356
|
|
|
|
1357
|
|
|
return seq |
1358
|
|
|
|
1359
|
|
|
def get_top_fans(self, limit=None, cacheable=True): |
1360
|
|
|
"""Returns a list of the Users who played this the most. |
1361
|
|
|
# Parameters: |
1362
|
|
|
* limit int: Max elements. |
1363
|
|
|
# For Artist/Track |
1364
|
|
|
""" |
1365
|
|
|
|
1366
|
|
|
doc = self._request(self.ws_prefix + '.getTopFans', cacheable) |
1367
|
|
|
|
1368
|
|
|
seq = [] |
1369
|
|
|
|
1370
|
|
|
elements = doc.getElementsByTagName('user') |
1371
|
|
|
|
1372
|
|
|
for element in elements: |
1373
|
|
|
if limit and len(seq) >= limit: |
1374
|
|
|
break |
1375
|
|
|
|
1376
|
|
|
name = _extract(element, 'name') |
1377
|
|
|
weight = _number(_extract(element, 'weight')) |
1378
|
|
|
|
1379
|
|
|
seq.append(TopItem(User(name, self.network), weight)) |
1380
|
|
|
|
1381
|
|
|
return seq |
1382
|
|
|
|
1383
|
|
|
def share(self, users, message=None): |
1384
|
|
|
""" |
1385
|
|
|
Shares this (sends out recommendations). |
1386
|
|
|
Parameters: |
1387
|
|
|
* users [User|str,]: A list that can contain usernames, emails, |
1388
|
|
|
User objects, or all of them. |
1389
|
|
|
* message str: A message to include in the recommendation message. |
1390
|
|
|
Only for Artist/Event/Track. |
1391
|
|
|
""" |
1392
|
|
|
|
1393
|
|
|
# Last.fm currently accepts a max of 10 recipient at a time |
1394
|
|
|
while(len(users) > 10): |
1395
|
|
|
section = users[0:9] |
1396
|
|
|
users = users[9:] |
1397
|
|
|
self.share(section, message) |
1398
|
|
|
|
1399
|
|
|
nusers = [] |
1400
|
|
|
for user in users: |
1401
|
|
|
if isinstance(user, User): |
1402
|
|
|
nusers.append(user.get_name()) |
1403
|
|
|
else: |
1404
|
|
|
nusers.append(user) |
1405
|
|
|
|
1406
|
|
|
params = self._get_params() |
1407
|
|
|
recipients = ','.join(nusers) |
1408
|
|
|
params['recipient'] = recipients |
1409
|
|
|
if message: |
1410
|
|
|
params['message'] = message |
1411
|
|
|
|
1412
|
|
|
self._request(self.ws_prefix + '.share', False, params) |
1413
|
|
|
|
1414
|
|
|
def get_wiki_published_date(self): |
1415
|
|
|
""" |
1416
|
|
|
Returns the summary of the wiki. |
1417
|
|
|
Only for Album/Track. |
1418
|
|
|
""" |
1419
|
|
|
return self.get_wiki("published") |
1420
|
|
|
|
1421
|
|
|
def get_wiki_summary(self): |
1422
|
|
|
""" |
1423
|
|
|
Returns the summary of the wiki. |
1424
|
|
|
Only for Album/Track. |
1425
|
|
|
""" |
1426
|
|
|
return self.get_wiki("summary") |
1427
|
|
|
|
1428
|
|
|
def get_wiki_content(self): |
1429
|
|
|
""" |
1430
|
|
|
Returns the summary of the wiki. |
1431
|
|
|
Only for Album/Track. |
1432
|
|
|
""" |
1433
|
|
|
return self.get_wiki("content") |
1434
|
|
|
|
1435
|
|
|
def get_wiki(self, section): |
1436
|
|
|
""" |
1437
|
|
|
Returns a section of the wiki. |
1438
|
|
|
Only for Album/Track. |
1439
|
|
|
section can be "content", "summary" or |
1440
|
|
|
"published" (for published date) |
1441
|
|
|
""" |
1442
|
|
|
|
1443
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
1444
|
|
|
|
1445
|
|
|
if len(doc.getElementsByTagName("wiki")) == 0: |
1446
|
|
|
return |
1447
|
|
|
|
1448
|
|
|
node = doc.getElementsByTagName("wiki")[0] |
1449
|
|
|
|
1450
|
|
|
return _extract(node, section) |
1451
|
|
|
|
1452
|
|
|
def get_shouts(self, limit=50, cacheable=False): |
1453
|
|
|
""" |
1454
|
|
|
Returns a sequence of Shout objects |
1455
|
|
|
""" |
1456
|
|
|
|
1457
|
|
|
shouts = [] |
1458
|
|
|
for node in _collect_nodes( |
1459
|
|
|
limit, |
1460
|
|
|
self, |
1461
|
|
|
self.ws_prefix + ".getShouts", |
1462
|
|
|
cacheable): |
1463
|
|
|
shouts.append( |
1464
|
|
|
Shout( |
1465
|
|
|
_extract(node, "body"), |
1466
|
|
|
User(_extract(node, "author"), self.network), |
1467
|
|
|
_extract(node, "date") |
1468
|
|
|
) |
1469
|
|
|
) |
1470
|
|
|
return shouts |
1471
|
|
|
|
1472
|
|
|
|
1473
|
|
|
class _Chartable(object): |
1474
|
|
|
"""Common functions for classes with charts.""" |
1475
|
|
|
|
1476
|
|
|
def __init__(self, ws_prefix): |
1477
|
|
|
self.ws_prefix = ws_prefix # TODO move to _BaseObject? |
1478
|
|
|
|
1479
|
|
|
def get_weekly_chart_dates(self): |
1480
|
|
|
"""Returns a list of From and To tuples for the available charts.""" |
1481
|
|
|
|
1482
|
|
|
doc = self._request(self.ws_prefix + ".getWeeklyChartList", True) |
1483
|
|
|
|
1484
|
|
|
seq = [] |
1485
|
|
|
for node in doc.getElementsByTagName("chart"): |
1486
|
|
|
seq.append((node.getAttribute("from"), node.getAttribute("to"))) |
1487
|
|
|
|
1488
|
|
|
return seq |
1489
|
|
|
|
1490
|
|
|
def get_weekly_album_charts(self, from_date=None, to_date=None): |
1491
|
|
|
""" |
1492
|
|
|
Returns the weekly album charts for the week starting from the |
1493
|
|
|
from_date value to the to_date value. |
1494
|
|
|
Only for Group or User. |
1495
|
|
|
""" |
1496
|
|
|
return self.get_weekly_charts("album", from_date, to_date) |
1497
|
|
|
|
1498
|
|
|
def get_weekly_artist_charts(self, from_date=None, to_date=None): |
1499
|
|
|
""" |
1500
|
|
|
Returns the weekly artist charts for the week starting from the |
1501
|
|
|
from_date value to the to_date value. |
1502
|
|
|
Only for Group, Tag or User. |
1503
|
|
|
""" |
1504
|
|
|
return self.get_weekly_charts("artist", from_date, to_date) |
1505
|
|
|
|
1506
|
|
|
def get_weekly_track_charts(self, from_date=None, to_date=None): |
1507
|
|
|
""" |
1508
|
|
|
Returns the weekly track charts for the week starting from the |
1509
|
|
|
from_date value to the to_date value. |
1510
|
|
|
Only for Group or User. |
1511
|
|
|
""" |
1512
|
|
|
return self.get_weekly_charts("track", from_date, to_date) |
1513
|
|
|
|
1514
|
|
|
def get_weekly_charts(self, chart_kind, from_date=None, to_date=None): |
1515
|
|
|
""" |
1516
|
|
|
Returns the weekly charts for the week starting from the |
1517
|
|
|
from_date value to the to_date value. |
1518
|
|
|
chart_kind should be one of "album", "artist" or "track" |
1519
|
|
|
""" |
1520
|
|
|
method = ".getWeekly" + chart_kind.title() + "Chart" |
1521
|
|
|
chart_type = eval(chart_kind.title()) # string to type |
1522
|
|
|
|
1523
|
|
|
params = self._get_params() |
1524
|
|
|
if from_date and to_date: |
1525
|
|
|
params["from"] = from_date |
1526
|
|
|
params["to"] = to_date |
1527
|
|
|
|
1528
|
|
|
doc = self._request( |
1529
|
|
|
self.ws_prefix + method, True, params) |
1530
|
|
|
|
1531
|
|
|
seq = [] |
1532
|
|
|
for node in doc.getElementsByTagName(chart_kind.lower()): |
1533
|
|
|
item = chart_type( |
1534
|
|
|
_extract(node, "artist"), _extract(node, "name"), self.network) |
1535
|
|
|
weight = _number(_extract(node, "playcount")) |
1536
|
|
|
seq.append(TopItem(item, weight)) |
1537
|
|
|
|
1538
|
|
|
return seq |
1539
|
|
|
|
1540
|
|
|
|
1541
|
|
|
class _Taggable(object): |
1542
|
|
|
"""Common functions for classes with tags.""" |
1543
|
|
|
|
1544
|
|
|
def __init__(self, ws_prefix): |
1545
|
|
|
self.ws_prefix = ws_prefix # TODO move to _BaseObject |
1546
|
|
|
|
1547
|
|
|
def add_tags(self, tags): |
1548
|
|
|
"""Adds one or several tags. |
1549
|
|
|
* tags: A sequence of tag names or Tag objects. |
1550
|
|
|
""" |
1551
|
|
|
|
1552
|
|
|
for tag in tags: |
1553
|
|
|
self.add_tag(tag) |
1554
|
|
|
|
1555
|
|
|
def add_tag(self, tag): |
1556
|
|
|
"""Adds one tag. |
1557
|
|
|
* tag: a tag name or a Tag object. |
1558
|
|
|
""" |
1559
|
|
|
|
1560
|
|
|
if isinstance(tag, Tag): |
1561
|
|
|
tag = tag.get_name() |
1562
|
|
|
|
1563
|
|
|
params = self._get_params() |
1564
|
|
|
params['tags'] = tag |
1565
|
|
|
|
1566
|
|
|
self._request(self.ws_prefix + '.addTags', False, params) |
1567
|
|
|
|
1568
|
|
|
def remove_tag(self, tag): |
1569
|
|
|
"""Remove a user's tag from this object.""" |
1570
|
|
|
|
1571
|
|
|
if isinstance(tag, Tag): |
1572
|
|
|
tag = tag.get_name() |
1573
|
|
|
|
1574
|
|
|
params = self._get_params() |
1575
|
|
|
params['tag'] = tag |
1576
|
|
|
|
1577
|
|
|
self._request(self.ws_prefix + '.removeTag', False, params) |
1578
|
|
|
|
1579
|
|
|
def get_tags(self): |
1580
|
|
|
"""Returns a list of the tags set by the user to this object.""" |
1581
|
|
|
|
1582
|
|
|
# Uncacheable because it can be dynamically changed by the user. |
1583
|
|
|
params = self._get_params() |
1584
|
|
|
|
1585
|
|
|
doc = self._request(self.ws_prefix + '.getTags', False, params) |
1586
|
|
|
tag_names = _extract_all(doc, 'name') |
1587
|
|
|
tags = [] |
1588
|
|
|
for tag in tag_names: |
1589
|
|
|
tags.append(Tag(tag, self.network)) |
1590
|
|
|
|
1591
|
|
|
return tags |
1592
|
|
|
|
1593
|
|
|
def remove_tags(self, tags): |
1594
|
|
|
"""Removes one or several tags from this object. |
1595
|
|
|
* tags: a sequence of tag names or Tag objects. |
1596
|
|
|
""" |
1597
|
|
|
|
1598
|
|
|
for tag in tags: |
1599
|
|
|
self.remove_tag(tag) |
1600
|
|
|
|
1601
|
|
|
def clear_tags(self): |
1602
|
|
|
"""Clears all the user-set tags. """ |
1603
|
|
|
|
1604
|
|
|
self.remove_tags(*(self.get_tags())) |
1605
|
|
|
|
1606
|
|
|
def set_tags(self, tags): |
1607
|
|
|
"""Sets this object's tags to only those tags. |
1608
|
|
|
* tags: a sequence of tag names or Tag objects. |
1609
|
|
|
""" |
1610
|
|
|
|
1611
|
|
|
c_old_tags = [] |
1612
|
|
|
old_tags = [] |
1613
|
|
|
c_new_tags = [] |
1614
|
|
|
new_tags = [] |
1615
|
|
|
|
1616
|
|
|
to_remove = [] |
1617
|
|
|
to_add = [] |
1618
|
|
|
|
1619
|
|
|
tags_on_server = self.get_tags() |
1620
|
|
|
|
1621
|
|
|
for tag in tags_on_server: |
1622
|
|
|
c_old_tags.append(tag.get_name().lower()) |
1623
|
|
|
old_tags.append(tag.get_name()) |
1624
|
|
|
|
1625
|
|
|
for tag in tags: |
1626
|
|
|
c_new_tags.append(tag.lower()) |
1627
|
|
|
new_tags.append(tag) |
1628
|
|
|
|
1629
|
|
|
for i in range(0, len(old_tags)): |
1630
|
|
|
if not c_old_tags[i] in c_new_tags: |
1631
|
|
|
to_remove.append(old_tags[i]) |
1632
|
|
|
|
1633
|
|
|
for i in range(0, len(new_tags)): |
1634
|
|
|
if not c_new_tags[i] in c_old_tags: |
1635
|
|
|
to_add.append(new_tags[i]) |
1636
|
|
|
|
1637
|
|
|
self.remove_tags(to_remove) |
1638
|
|
|
self.add_tags(to_add) |
1639
|
|
|
|
1640
|
|
|
def get_top_tags(self, limit=None): |
1641
|
|
|
"""Returns a list of the most frequently used Tags on this object.""" |
1642
|
|
|
|
1643
|
|
|
doc = self._request(self.ws_prefix + '.getTopTags', True) |
1644
|
|
|
|
1645
|
|
|
elements = doc.getElementsByTagName('tag') |
1646
|
|
|
seq = [] |
1647
|
|
|
|
1648
|
|
|
for element in elements: |
1649
|
|
|
tag_name = _extract(element, 'name') |
1650
|
|
|
tagcount = _extract(element, 'count') |
1651
|
|
|
|
1652
|
|
|
seq.append(TopItem(Tag(tag_name, self.network), tagcount)) |
1653
|
|
|
|
1654
|
|
|
if limit: |
1655
|
|
|
seq = seq[:limit] |
1656
|
|
|
|
1657
|
|
|
return seq |
1658
|
|
|
|
1659
|
|
|
|
1660
|
|
|
class WSError(Exception):
    """Exception related to the Network web service"""

    def __init__(self, network, status, details):
        self.network = network
        self.status = status
        self.details = details

    @_string_output
    def __str__(self):
        return self.details

    def get_id(self):
        """Returns the exception ID, from one of the following:
            STATUS_INVALID_SERVICE = 2
            STATUS_INVALID_METHOD = 3
            STATUS_AUTH_FAILED = 4
            STATUS_INVALID_FORMAT = 5
            STATUS_INVALID_PARAMS = 6
            STATUS_INVALID_RESOURCE = 7
            STATUS_TOKEN_ERROR = 8
            STATUS_INVALID_SK = 9
            STATUS_INVALID_API_KEY = 10
            STATUS_OFFLINE = 11
            STATUS_SUBSCRIBERS_ONLY = 12
            STATUS_TOKEN_UNAUTHORIZED = 14
            STATUS_TOKEN_EXPIRED = 15
        """
        return self.status
1690
|
|
|
|
1691
|
|
|
|
1692
|
|
|
class MalformedResponseError(Exception):
    """Exception conveying a malformed response from the music network."""

    def __init__(self, network, underlying_error):
        self.network = network
        self.underlying_error = underlying_error

    def __str__(self):
        # Embed the network name and the original parsing error.
        template = "Malformed response from {}. Underlying error: {}"
        return template.format(self.network.name, str(self.underlying_error))
1702
|
|
|
|
1703
|
|
|
|
1704
|
|
|
class NetworkError(Exception):
    """Exception conveying a problem in sending a request to Last.fm"""

    def __init__(self, network, underlying_error):
        self.network = network
        self.underlying_error = underlying_error

    def __str__(self):
        # Prefix the original transport error's message.
        return "NetworkError: " + str(self.underlying_error)
1713
|
|
|
|
1714
|
|
|
|
1715
|
|
|
class _Opus(_BaseObject, _Taggable):
    """An album or track: shared base class for Album and Track."""

    # Artist object for this work; set in __init__.
    artist = None
    # Album or track title.
    title = None
    # Optional username, used by get_userplaycount().
    username = None

    # Re-state the inherited hash: defining __eq__ below would otherwise
    # make instances unhashable on Python 3.
    __hash__ = _BaseObject.__hash__

    def __init__(self, artist, title, network, ws_prefix, username=None):
        """
        Create an opus instance.
        # Parameters:
            * artist: An artist name or an Artist object.
            * title: The album or track title.
            * ws_prefix: 'album' or 'track'
            * username: Optional username for user-specific queries.
        """

        _BaseObject.__init__(self, network, ws_prefix)
        _Taggable.__init__(self, ws_prefix)

        # Accept either a ready-made Artist or a plain artist name.
        if isinstance(artist, Artist):
            self.artist = artist
        else:
            self.artist = Artist(artist, self.network)

        self.title = title
        self.username = username

    def __repr__(self):
        # e.g. pylast.Album('Artist', 'Title', <network>) -- class name is
        # derived from ws_prefix ('album' -> 'Album').
        return "pylast.%s(%s, %s, %s)" % (
            self.ws_prefix.title(), repr(self.artist.name),
            repr(self.title), repr(self.network))

    @_string_output
    def __str__(self):
        # "Artist - Title"
        return _unicode("%s - %s") % (
            self.get_artist().get_name(), self.get_title())

    def __eq__(self, other):
        """Equal when exact class matches and artist name and title match
        case-insensitively."""
        if type(self) != type(other):
            return False
        a = self.get_title().lower()
        b = other.get_title().lower()
        c = self.get_artist().get_name().lower()
        d = other.get_artist().get_name().lower()
        return (a == b) and (c == d)

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # Request parameters shared by the album.* / track.* calls, e.g.
        # {'artist': ..., 'album': ...} or {'artist': ..., 'track': ...}.
        return {
            'artist': self.get_artist().get_name(),
            self.ws_prefix: self.get_title()}

    def get_artist(self):
        """Returns the associated Artist object."""

        return self.artist

    def get_title(self, properly_capitalized=False):
        """Returns the album or track title.

        With properly_capitalized=True the title is re-fetched from the
        network, overwriting the stored one."""
        if properly_capitalized:
            self.title = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.title

    def get_name(self, properly_capitalized=False):
        """Returns the album or track title (alias to get_title())."""

        return self.get_title(properly_capitalized)

    def get_id(self):
        """Returns the ID on the network."""

        return _extract(
            self._request(self.ws_prefix + ".getInfo", cacheable=True), "id")

    def get_playcount(self):
        """Returns the number of plays on the network."""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username.

        Returns None when no username was supplied at construction."""

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_listener_count(self):
        """Returns the number of listeners on the network."""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "listeners"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of the album or track, or None."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)

        try:
            # Walk <lfm><album|track><mbid> via direct children only, to
            # avoid matching a nested <mbid> (e.g. the artist's).
            lfm = doc.getElementsByTagName('lfm')[0]
            opus = next(self._get_children_by_tag_name(lfm, self.ws_prefix))
            mbid = next(self._get_children_by_tag_name(opus, "mbid"))
            return mbid.firstChild.nodeValue
        except StopIteration:
            # Raised by next() when a child element is missing -> no MBID.
            # NOTE(review): an empty <mbid/> makes firstChild None and would
            # raise AttributeError here rather than return None -- confirm.
            return None

    def _get_children_by_tag_name(self, node, tag_name):
        # Generator over *direct* child elements matching tag_name
        # ('*' matches any element). Text nodes are skipped.
        for child in node.childNodes:
            if (child.nodeType == child.ELEMENT_NODE and
                    (tag_name == '*' or child.tagName == tag_name)):
                yield child
1839
|
|
|
|
1840
|
|
|
|
1841
|
|
|
class Album(_Opus):
    """An album."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        super(Album, self).__init__(artist, title, network, "album", username)

    def get_release_date(self):
        """Returns the release date of the album."""
        info_doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract(info_doc, "releasedate")

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """
        info_doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_all(info_doc, 'image')[size]

    def get_tracks(self):
        """Returns the list of Tracks on this album."""
        info_doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_tracks(info_doc, "tracks")

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album page on the network.
        # Parameters:
        * domain_name str: The network's language domain. Possible values:
          o DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH, DOMAIN_FRENCH,
            DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE, DOMAIN_SWEDISH,
            DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE, DOMAIN_CHINESE
        """
        # URL-escape both components before filling the URL template.
        mapping = {
            'artist': _url_safe(self.get_artist().get_name()),
            'album': _url_safe(self.get_title()),
        }
        return self.network._get_url(domain_name, self.ws_prefix) % mapping
1900
|
|
|
|
1901
|
|
|
|
1902
|
|
|
class Artist(_BaseObject, _Taggable):
    """An artist."""

    # Artist name; may be overwritten by get_name(properly_capitalized=True).
    name = None
    # Optional username, used by get_userplaycount().
    username = None

    # Re-state the inherited hash: defining __eq__ below would otherwise
    # make instances unhashable on Python 3.
    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network, username=None):
        """Create an artist object.
        # Parameters:
            * name str: The artist's name.
            * network: The Network object to send requests through.
            * username: Optional username for user-specific queries.
        """

        _BaseObject.__init__(self, network, 'artist')
        _Taggable.__init__(self, 'artist')

        self.name = name
        self.username = username

    def __repr__(self):
        return "pylast.Artist(%s, %s)" % (
            repr(self.get_name()), repr(self.network))

    def __unicode__(self):
        return six.text_type(self.get_name())

    @_string_output
    def __str__(self):
        return self.__unicode__()

    def __eq__(self, other):
        # Same class and case-insensitively equal names.
        if type(self) is type(other):
            return self.get_name().lower() == other.get_name().lower()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # ws_prefix is 'artist' (set in __init__).
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the artist.
        If properly_capitalized was asserted then the name would be downloaded
        overwriting the given one."""

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_correction(self):
        """Returns the corrected artist name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        return _extract_all(
            self._request(self.ws_prefix + ".getInfo", True), "image")[size]

    def get_playcount(self):
        """Returns the number of plays on the network."""

        return _number(_extract(
            self._request(self.ws_prefix + ".getInfo", True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username.

        Returns None when no username was supplied at construction."""

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of this artist."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _extract(doc, "mbid")

    def get_listener_count(self):
        """Returns the number of listeners on the network."""

        # Cached on the instance after the first request.
        if hasattr(self, "listener_count"):
            return self.listener_count
        else:
            self.listener_count = _number(_extract(
                self._request(self.ws_prefix + ".getInfo", True), "listeners"))
            return self.listener_count

    def is_streamable(self):
        """Returns True if the artist is streamable."""

        return bool(_number(_extract(
            self._request(self.ws_prefix + ".getInfo", True), "streamable")))

    def get_bio(self, section, language=None):
        """
        Returns a section of the bio.
        section can be "content", "summary" or
            "published" (for published date)
        """
        # Only build explicit params when a language override is requested.
        if language:
            params = self._get_params()
            params["lang"] = language
        else:
            params = None

        return self._extract_cdata_from_request(
            self.ws_prefix + ".getInfo", section, params)

    def get_bio_published_date(self):
        """Returns the date on which the artist's biography was published."""
        return self.get_bio("published")

    def get_bio_summary(self, language=None):
        """Returns the summary of the artist's biography."""
        return self.get_bio("summary", language)

    def get_bio_content(self, language=None):
        """Returns the content of the artist's biography."""
        return self.get_bio("content", language)

    def get_upcoming_events(self):
        """Returns a list of the upcoming Events for this artist."""

        doc = self._request(self.ws_prefix + '.getEvents', True)

        return _extract_events_from_doc(doc, self.network)

    def get_similar(self, limit=None):
        """Returns the similar artists on the network."""

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(self.ws_prefix + '.getSimilar', True, params)

        # The response lists <name> and <match> elements pairwise.
        names = _extract_all(doc, "name")
        matches = _extract_all(doc, "match")

        artists = []
        for i in range(0, len(names)):
            artists.append(SimilarItem(
                Artist(names[i], self.network), _number(matches[i])))

        return artists

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopAlbums", "album", Album, params, cacheable)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks by this artist."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the artist page on the network.
        # Parameters:
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        artist = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "artist") % {'artist': artist}

    def shout(self, message):
        """
        Post a shout
        """
        # NOTE(review): method name "artist.Shout" uses an unusual capital S;
        # kept as-is -- confirm against the web-service method name.

        params = self._get_params()
        params["message"] = message

        self._request("artist.Shout", False, params)

    def get_band_members(self):
        """Returns a list of band members or None if unknown."""

        # Stays None when the response has no <bandmembers> element.
        names = None
        doc = self._request(self.ws_prefix + ".getInfo", True)

        for node in doc.getElementsByTagName("bandmembers"):
            names = _extract_all(node, "name")

        return names
2131
|
|
|
|
2132
|
|
|
|
2133
|
|
|
class Event(_BaseObject):
    """An event, identified by its numeric event id."""

    id = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, event_id, network):
        _BaseObject.__init__(self, network, 'event')
        self.id = event_id

    def __repr__(self):
        return "pylast.Event(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Event #" + str(self.get_id())

    def __eq__(self, other):
        # Equal when the exact class matches and the ids match.
        return type(self) is type(other) and self.get_id() == other.get_id()

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        return {'event': self.get_id()}

    def _get_info_doc(self):
        """Fetch the (cacheable) event.getInfo document for this event."""
        return self._request("event.getInfo", True)

    def attend(self, attending_status):
        """Sets the attending status.
        * attending_status: The attending status. Possible values:
          o EVENT_ATTENDING
          o EVENT_MAYBE_ATTENDING
          o EVENT_NOT_ATTENDING
        """
        params = self._get_params()
        params['status'] = attending_status
        self._request('event.attend', False, params)

    def get_attendees(self):
        """Get a list of attendees for an event."""
        doc = self._request("event.getAttendees", False)
        return [User(name, self.network)
                for name in _extract_all(doc, "name")]

    def get_id(self):
        """Returns the id of the event on the network."""
        return self.id

    def get_title(self):
        """Returns the title of the event."""
        return _extract(self._get_info_doc(), "title")

    def get_headliner(self):
        """Returns the headliner of the event as an Artist."""
        headliner = _extract(self._get_info_doc(), "headliner")
        return Artist(headliner, self.network)

    def get_artists(self):
        """Returns a list of the participating Artists."""
        return [Artist(name, self.network)
                for name in _extract_all(self._get_info_doc(), "artist")]

    def get_venue(self):
        """Returns the venue where the event is held."""
        venue_node = self._get_info_doc().getElementsByTagName("venue")[0]
        venue_id = _number(_extract(venue_node, "id"))
        return Venue(venue_id, self.network, venue_element=venue_node)

    def get_start_date(self):
        """Returns the date when the event starts."""
        return _extract(self._get_info_doc(), "startDate")

    def get_description(self):
        """Returns the description of the event."""
        return _extract(self._get_info_doc(), "description")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """
        return _extract_all(self._get_info_doc(), "image")[size]

    def get_attendance_count(self):
        """Returns the number of attending people."""
        return _number(_extract(self._get_info_doc(), "attendance"))

    def get_review_count(self):
        """Returns the number of available reviews for this event."""
        return _number(_extract(self._get_info_doc(), "reviews"))

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the event page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH, DOMAIN_FRENCH,
            DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE, DOMAIN_SWEDISH,
            DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE, DOMAIN_CHINESE
        """
        return self.network._get_url(
            domain_name, "event") % {'id': self.get_id()}

    def shout(self, message):
        """Post a shout."""
        params = self._get_params()
        params["message"] = message
        self._request("event.Shout", False, params)
2303
|
|
|
|
2304
|
|
|
|
2305
|
|
|
class Country(_BaseObject):
    """A country at Last.fm."""

    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, "geo")

        self.name = name

    def __repr__(self):
        return "pylast.Country(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Country names compare case-insensitively.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Fixed: defined as the strict negation of __eq__. The previous
        # implementation compared case-sensitively here, so Country("UK")
        # and Country("uk") were simultaneously equal and unequal.
        return not self.__eq__(other)

    def _get_params(self):  # TODO can move to _BaseObject
        return {'country': self.get_name()}

    def _get_name_from_code(self, alpha2code):
        # TODO: Have this function lookup the alpha-2 code and return the
        # country name.

        return alpha2code

    def get_name(self):
        """Returns the country name."""

        return self.name

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request('geo.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a sequence of the most played tracks."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the country page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH, DOMAIN_FRENCH,
            DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE, DOMAIN_SWEDISH,
            DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE, DOMAIN_CHINESE
        """

        country_name = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "country") % {'country_name': country_name}
2384
|
|
|
|
2385
|
|
|
|
2386
|
|
|
class Metro(_BaseObject):
    """A metro at Last.fm."""

    name = None
    country = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, country, network):
        _BaseObject.__init__(self, network, None)

        self.name = name
        self.country = country

    def __repr__(self):
        return "pylast.Metro(%s, %s, %s)" % (
            repr(self.name), repr(self.country), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name() + ", " + self.get_country()

    def __eq__(self, other):
        # Metros match when both name and country match, case-insensitively.
        return (self.get_name().lower() == other.get_name().lower() and
                self.get_country().lower() == other.get_country().lower())

    def __ne__(self, other):
        # Fixed: defined as the strict negation of __eq__. The previous
        # implementation compared the metro name case-sensitively, so two
        # metros differing only in name case were both == and != at once.
        return not self.__eq__(other)

    def _get_params(self):
        return {'metro': self.get_name(), 'country': self.get_country()}

    def get_name(self):
        """Returns the metro name."""

        return self.name

    def get_country(self):
        """Returns the metro country."""

        return self.country

    def _get_chart(
            self, method, tag="artist", limit=None, from_date=None,
            to_date=None, cacheable=True):
        """Internal helper for getting geo charts.

        Returns a list of TopItem(Artist-or-Track, listener count), or
        None for an unsupported tag.
        """
        params = self._get_params()
        if limit:
            params["limit"] = limit
        # The API needs both endpoints to define a weekly range.
        if from_date and to_date:
            params["from"] = from_date
            params["to"] = to_date

        doc = self._request(method, cacheable, params)

        seq = []
        for node in doc.getElementsByTagName(tag):
            if tag == "artist":
                item = Artist(_extract(node, "name"), self.network)
            elif tag == "track":
                title = _extract(node, "name")
                # Track nodes nest the artist name one level down.
                artist = _extract_element_tree(node).get('artist')['name']
                item = Track(artist, title, self.network)
            else:
                return None
            weight = _number(_extract(node, "listeners"))
            seq.append(TopItem(item, weight))

        return seq

    def get_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of artists for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_hype_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of hyped (up and coming) artists for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroHypeArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_unique_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of the artists which make that metro unique.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroUniqueArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of tracks for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroTrackChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_hype_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of hyped (up and coming) tracks for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroHypeTrackChart", tag=tag,
            limit=limit, from_date=from_date, to_date=to_date,
            cacheable=cacheable)

    def get_unique_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of the tracks which make that metro unique.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroUniqueTrackChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)
2547
|
|
|
|
2548
|
|
|
|
2549
|
|
|
class Library(_BaseObject):
    """A user's Last.fm library."""

    user = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, network):
        _BaseObject.__init__(self, network, 'library')

        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        # Pagination indices used by the incremental fetchers.
        self._albums_index = 0
        self._artists_index = 0
        self._tracks_index = 0

    def __repr__(self):
        return "pylast.Library(%s, %s)" % (repr(self.user), repr(self.network))

    @_string_output
    def __str__(self):
        return repr(self.get_user()) + "'s Library"

    def _get_params(self):
        return {'user': self.user.get_name()}

    def get_user(self):
        """Returns the user who owns this library."""

        return self.user

    def add_album(self, album):
        """Add an album to this library."""

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        # Use ws_prefix for consistency with the other library methods
        # (was a hard-coded "library.addAlbum"; ws_prefix is "library").
        self._request(self.ws_prefix + ".addAlbum", False, params)

    def remove_album(self, album):
        """Remove an album from this library."""

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        self._request(self.ws_prefix + ".removeAlbum", False, params)

    def add_artist(self, artist):
        """Add an artist to this library.

        * artist: An Artist object or the artist name as a string.
        """

        params = self._get_params()
        # isinstance instead of an exact type() comparison, so str
        # subclasses are accepted too.
        if isinstance(artist, str):
            params["artist"] = artist
        else:
            params["artist"] = artist.get_name()

        self._request(self.ws_prefix + ".addArtist", False, params)

    def remove_artist(self, artist):
        """Remove an artist from this library.

        * artist: An Artist object or the artist name as a string.
        """

        params = self._get_params()
        if isinstance(artist, str):
            params["artist"] = artist
        else:
            params["artist"] = artist.get_name()

        self._request(self.ws_prefix + ".removeArtist", False, params)

    def add_track(self, track):
        """Add a track to this library."""

        params = self._get_params()
        params["track"] = track.get_title()

        self._request(self.ws_prefix + ".addTrack", False, params)

    def get_albums(self, artist=None, limit=50, cacheable=True):
        """
        Returns a sequence of Album objects.
        If no artist is specified, it will return all, sorted by decreasing
        play count.
        If limit==None it will return all (may take a while).
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getAlbums",
                cacheable,
                params):
            # The second <name> element in each node is the artist name.
            name = _extract(node, "name")
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Album(artist, name, self.network), playcount, tagcount))

        return seq

    def get_artists(self, limit=50, cacheable=True):
        """
        Returns a sequence of Artist objects.
        If limit==None it will return all (may take a while).
        """

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getArtists",
                cacheable):
            name = _extract(node, "name")

            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Artist(name, self.network), playcount, tagcount))

        return seq

    def get_tracks(self, artist=None, album=None, limit=50, cacheable=True):
        """
        Returns a sequence of Track objects.
        If limit==None it will return all (may take a while).
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist
        if album:
            params["album"] = album

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getTracks",
                cacheable,
                params):
            # The second <name> element in each node is the artist name.
            name = _extract(node, "name")
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Track(artist, name, self.network), playcount, tagcount))

        return seq

    def remove_scrobble(self, artist, title, timestamp):
        """Remove a scrobble from a user's Last.fm library. Parameters:
            artist (Required) : The artist that composed the track
            title (Required) : The name of the track
            timestamp (Required) : The unix timestamp of the scrobble
                that you wish to remove
        """

        params = self._get_params()
        params["artist"] = artist
        params["track"] = title
        params["timestamp"] = timestamp

        self._request(self.ws_prefix + ".removeScrobble", False, params)
2725
|
|
|
|
2726
|
|
|
|
2727
|
|
|
class Playlist(_BaseObject):
    """A Last.fm user playlist."""

    id = None
    user = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, playlist_id, network):
        _BaseObject.__init__(self, network, "playlist")

        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        self.id = playlist_id

    @_string_output
    def __str__(self):
        return repr(self.user) + "'s playlist # " + repr(self.id)

    def _get_info_node(self):
        """
        Returns the node from user.getPlaylists where this playlist's info is.
        Returns None (implicitly) if no playlist with this ID is found.
        """

        doc = self._request("user.getPlaylists", True)

        for node in doc.getElementsByTagName("playlist"):
            if _extract(node, "id") == str(self.get_id()):
                return node

    def _get_params(self):
        return {'user': self.user.get_name(), 'playlistID': self.get_id()}

    def get_id(self):
        """Returns the playlist ID."""

        return self.id

    def get_user(self):
        """Returns the owner user of this playlist."""

        return self.user

    def get_tracks(self):
        """Returns a list of the tracks on this user playlist."""

        uri = _unicode('lastfm://playlist/%s') % self.get_id()

        return XSPF(uri, self.network).get_tracks()

    def add_track(self, track):
        """Adds a Track to this Playlist."""

        params = self._get_params()
        params['artist'] = track.get_artist().get_name()
        params['track'] = track.get_title()

        # Use ws_prefix for consistency with the rest of the class
        # (was a hard-coded 'playlist.addTrack'; ws_prefix is "playlist").
        self._request(self.ws_prefix + '.addTrack', False, params)

    def get_title(self):
        """Returns the title of this playlist."""

        return _extract(self._get_info_node(), "title")

    def get_creation_date(self):
        """Returns the creation date of this playlist."""

        return _extract(self._get_info_node(), "date")

    def get_size(self):
        """Returns the number of tracks in this playlist."""

        return _number(_extract(self._get_info_node(), "size"))

    def get_description(self):
        """Returns the description of this playlist."""

        return _extract(self._get_info_node(), "description")

    def get_duration(self):
        """Returns the duration of this playlist in milliseconds."""

        return _number(_extract(self._get_info_node(), "duration"))

    def is_streamable(self):
        """
        Returns True if the playlist is streamable.
        For a playlist to be streamable, it needs at least 45 tracks by 15
        different artists."""

        # Simplified from an if/else that returned literal True/False.
        return _extract(self._get_info_node(), "streamable") == '1'

    def has_track(self, track):
        """Checks to see if track is already in the playlist.
        * track: Any Track object.
        """

        return track in self.get_tracks()

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        return _extract(self._get_info_node(), "image")[size]

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the playlist on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        # The appendix is the last path component of the canonical
        # (English) URL, reused for every language domain.
        english_url = _extract(self._get_info_node(), "url")
        appendix = english_url[english_url.rfind("/") + 1:]

        return self.network._get_url(domain_name, "playlist") % {
            'appendix': appendix, "user": self.get_user().get_name()}
2867
|
|
|
|
2868
|
|
|
|
2869
|
|
|
class Tag(_BaseObject, _Chartable):
    """A Last.fm object tag."""

    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'tag')
        _Chartable.__init__(self, 'tag')

        self.name = name

    def __repr__(self):
        return "pylast.Tag(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Tag names compare case-insensitively.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        return self.get_name().lower() != other.get_name().lower()

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the tag.

        If properly_capitalized is True, query the web service for the
        canonical capitalization and cache it on self.name.
        """

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_similar(self):
        """Returns the tags similar to this one, ordered by similarity. """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        names = _extract_all(doc, 'name')
        for name in names:
            seq.append(Tag(name, self.network))

        return seq

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        # (Docstring typo "Retuns" fixed.)
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopAlbums', cacheable, params)

        return _extract_top_albums(doc, self.network)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks for this tag."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self.network)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the tag page on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "tag") % {'name': name}
2971
|
|
|
|
2972
|
|
|
|
2973
|
|
|
class Track(_Opus):
    """A Last.fm track."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        super(Track, self).__init__(artist, title, network, "track", username)

    def get_correction(self):
        """Returns the corrected track name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_duration(self):
        """Returns the track duration."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _number(_extract(doc, "duration"))

    def get_userloved(self):
        """Whether the user loved this track.

        Returns None if no username is associated with this track.
        """

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        loved = _number(_extract(doc, "userloved"))
        return bool(loved)

    def is_streamable(self):
        """Returns True if the track is available at Last.fm."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        return _extract(doc, "streamable") == "1"

    def is_fulltrack_available(self):
        """Returns True if the fulltrack is available for streaming."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        return doc.getElementsByTagName(
            "streamable")[0].getAttribute("fulltrack") == "1"

    def get_album(self):
        """Returns the album object of this track, or None if not found."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        albums = doc.getElementsByTagName("album")

        if not albums:
            return

        # Reuse the node already found instead of querying the DOM again.
        node = albums[0]
        return Album(
            _extract(node, "artist"), _extract(node, "title"), self.network)

    def love(self):
        """Adds the track to the user's loved tracks. """

        self._request(self.ws_prefix + '.love')

    def unlove(self):
        """Remove the track from the user's loved tracks. """

        self._request(self.ws_prefix + '.unlove')

    def ban(self):
        """Ban this track from ever playing on the radio. """

        self._request(self.ws_prefix + '.ban')

    def get_similar(self):
        """
        Returns similar tracks for this track on the network,
        based on listening data.
        """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        for node in doc.getElementsByTagName(self.ws_prefix):
            # The second <name> element in each node is the artist name.
            title = _extract(node, 'name')
            artist = _extract(node, 'name', 1)
            match = _number(_extract(node, "match"))

            seq.append(SimilarItem(Track(artist, title, self.network), match))

        return seq

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album or track page on the network.
        # Parameters:
        * domain_name str: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        artist = _url_safe(self.get_artist().get_name())
        title = _url_safe(self.get_title())

        return self.network._get_url(
            domain_name, self.ws_prefix) % {
                'artist': artist, 'title': title}
3091
|
|
|
|
3092
|
|
|
|
3093
|
|
|
class Group(_BaseObject, _Chartable):
    """A Last.fm group."""

    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'group')
        _Chartable.__init__(self, 'group')

        self.name = name

    def __repr__(self):
        return "pylast.Group(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Group names compare case-insensitively.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Fixed: this comparison was case-sensitive, contradicting __eq__,
        # so two case-differing names could be both == and != at once.
        return self.get_name().lower() != other.get_name().lower()

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self):
        """Returns the group name. """
        return self.name

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the group page on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "group") % {'name': name}

    def get_members(self, limit=50, cacheable=False):
        """
        Returns a sequence of User objects
        if limit==None it will return all
        """

        nodes = _collect_nodes(
            limit, self, self.ws_prefix + ".getMembers", cacheable)

        return [User(_extract(node, "name"), self.network) for node in nodes]
3162
|
|
|
|
3163
|
|
|
|
3164
|
|
|
class XSPF(_BaseObject):
    """A Last.fm XSPF playlist."""
    # (Docstring was malformed: it opened with a single quote and closed
    # with three, silently concatenating an empty string literal.)

    uri = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, uri, network):
        _BaseObject.__init__(self, network, None)

        self.uri = uri

    def _get_params(self):
        return {'playlistURL': self.get_uri()}

    @_string_output
    def __str__(self):
        return self.get_uri()

    def __eq__(self, other):
        return self.get_uri() == other.get_uri()

    def __ne__(self, other):
        return self.get_uri() != other.get_uri()

    def get_uri(self):
        """Returns the Last.fm playlist URI. """

        return self.uri

    def get_tracks(self):
        """Returns the tracks on this playlist."""

        doc = self._request('playlist.fetch', True)

        seq = []
        for node in doc.getElementsByTagName('track'):
            # XSPF uses <creator> for the artist name.
            title = _extract(node, 'title')
            artist = _extract(node, 'creator')

            seq.append(Track(artist, title, self.network))

        return seq
3207
|
|
|
|
3208
|
|
|
|
3209
|
|
|
class User(_BaseObject, _Chartable): |
3210
|
|
|
"""A Last.fm user.""" |
3211
|
|
|
|
3212
|
|
|
name = None |
3213
|
|
|
|
3214
|
|
|
__hash__ = _BaseObject.__hash__ |
3215
|
|
|
|
3216
|
|
|
    def __init__(self, user_name, network):
        """Create a User. `user_name` is the Last.fm username;
        `network` is the network object to query through."""
        # Both base classes must be initialised, in this order.
        _BaseObject.__init__(self, network, 'user')
        _Chartable.__init__(self, 'user')

        self.name = user_name

        # Pagination indices for the incremental event/artist fetchers.
        self._past_events_index = 0
        self._recommended_events_index = 0
        self._recommended_artists_index = 0
3225
|
|
|
|
3226
|
|
|
def __repr__(self): |
3227
|
|
|
return "pylast.User(%s, %s)" % (repr(self.name), repr(self.network)) |
3228
|
|
|
|
3229
|
|
|
    @_string_output
    def __str__(self):
        # The username is the canonical string form of a User.
        return self.get_name()
3232
|
|
|
|
3233
|
|
|
def __eq__(self, another): |
3234
|
|
|
if isinstance(another, User): |
3235
|
|
|
return self.get_name() == another.get_name() |
3236
|
|
|
else: |
3237
|
|
|
return False |
3238
|
|
|
|
3239
|
|
|
def __ne__(self, another): |
3240
|
|
|
if isinstance(another, User): |
3241
|
|
|
return self.get_name() != another.get_name() |
3242
|
|
|
else: |
3243
|
|
|
return True |
3244
|
|
|
|
3245
|
|
|
    def _get_params(self):
        # Default request parameters: identify this user by name.
        return {self.ws_prefix: self.get_name()}
3247
|
|
|
|
3248
|
|
|
    def get_name(self, properly_capitalized=False):
        """Returns the user name.

        If properly_capitalized is True, query the web service for the
        canonical capitalization and cache it on self.name.
        """

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name
3256
|
|
|
|
3257
|
|
|
def get_upcoming_events(self): |
3258
|
|
|
"""Returns all the upcoming events for this user.""" |
3259
|
|
|
|
3260
|
|
|
doc = self._request(self.ws_prefix + '.getEvents', True) |
3261
|
|
|
|
3262
|
|
|
return _extract_events_from_doc(doc, self.network) |
3263
|
|
|
|
3264
|
|
View Code Duplication |
def get_artist_tracks(self, artist, cacheable=False): |
|
|
|
|
3265
|
|
|
""" |
3266
|
|
|
Get a list of tracks by a given artist scrobbled by this user, |
3267
|
|
|
including scrobble time. |
3268
|
|
|
""" |
3269
|
|
|
# Not implemented: |
3270
|
|
|
# "Can be limited to specific timeranges, defaults to all time." |
3271
|
|
|
|
3272
|
|
|
params = self._get_params() |
3273
|
|
|
params['artist'] = artist |
3274
|
|
|
|
3275
|
|
|
seq = [] |
3276
|
|
|
for track in _collect_nodes( |
3277
|
|
|
None, |
3278
|
|
|
self, |
3279
|
|
|
self.ws_prefix + ".getArtistTracks", |
3280
|
|
|
cacheable, |
3281
|
|
|
params): |
3282
|
|
|
title = _extract(track, "name") |
3283
|
|
|
artist = _extract(track, "artist") |
3284
|
|
|
date = _extract(track, "date") |
3285
|
|
|
album = _extract(track, "album") |
3286
|
|
|
timestamp = track.getElementsByTagName( |
3287
|
|
|
"date")[0].getAttribute("uts") |
3288
|
|
|
|
3289
|
|
|
seq.append(PlayedTrack( |
3290
|
|
|
Track(artist, title, self.network), album, date, timestamp)) |
3291
|
|
|
|
3292
|
|
|
return seq |
3293
|
|
|
|
3294
|
|
|
def get_friends(self, limit=50, cacheable=False): |
3295
|
|
|
"""Returns a list of the user's friends. """ |
3296
|
|
|
|
3297
|
|
|
seq = [] |
3298
|
|
|
for node in _collect_nodes( |
3299
|
|
|
limit, |
3300
|
|
|
self, |
3301
|
|
|
self.ws_prefix + ".getFriends", |
3302
|
|
|
cacheable): |
3303
|
|
|
seq.append(User(_extract(node, "name"), self.network)) |
3304
|
|
|
|
3305
|
|
|
return seq |
3306
|
|
|
|
3307
|
|
View Code Duplication |
    def get_loved_tracks(self, limit=50, cacheable=True):
        """
        Returns this user's loved track as a sequence of LovedTrack objects in
        reverse order of their timestamp, all the way back to the first track.

        If limit==None, it will try to pull all the available data.

        This method uses caching. Enable caching only if you're pulling a
        large amount of data.

        Use extract_items() with the return of this function to
        get only a sequence of Track objects with no playback dates.
        """

        params = self._get_params()
        if limit:
            params['limit'] = limit

        seq = []
        for track in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getLovedTracks",
                cacheable,
                params):
            # The second <name> element in each node is the artist name;
            # the "uts" attribute holds the raw UNIX timestamp.
            title = _extract(track, "name")
            artist = _extract(track, "name", 1)
            date = _extract(track, "date")
            timestamp = track.getElementsByTagName(
                "date")[0].getAttribute("uts")

            seq.append(LovedTrack(
                Track(artist, title, self.network), date, timestamp))

        return seq
3342
|
|
|
|
3343
|
|
|
    def get_neighbours(self, limit=50, cacheable=True):
        """Returns a list of the user's neighbours."""
        # (Docstring corrected: it previously said "friends", but this
        # method queries user.getNeighbours.)

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getNeighbours', cacheable, params)

        seq = []
        names = _extract_all(doc, 'name')

        for name in names:
            seq.append(User(name, self.network))

        return seq
3360
|
|
|
|
3361
|
|
|
def get_past_events(self, limit=50, cacheable=False): |
3362
|
|
|
""" |
3363
|
|
|
Returns a sequence of Event objects |
3364
|
|
|
if limit==None it will return all |
3365
|
|
|
""" |
3366
|
|
|
|
3367
|
|
|
seq = [] |
3368
|
|
|
for node in _collect_nodes( |
3369
|
|
|
limit, |
3370
|
|
|
self, |
3371
|
|
|
self.ws_prefix + ".getPastEvents", |
3372
|
|
|
cacheable): |
3373
|
|
|
seq.append(Event(_extract(node, "id"), self.network)) |
3374
|
|
|
|
3375
|
|
|
return seq |
3376
|
|
|
|
3377
|
|
|
def get_playlists(self): |
3378
|
|
|
"""Returns a list of Playlists that this user owns.""" |
3379
|
|
|
|
3380
|
|
|
doc = self._request(self.ws_prefix + ".getPlaylists", True) |
3381
|
|
|
|
3382
|
|
|
playlists = [] |
3383
|
|
|
for playlist_id in _extract_all(doc, "id"): |
3384
|
|
|
playlists.append( |
3385
|
|
|
Playlist(self.get_name(), playlist_id, self.network)) |
3386
|
|
|
|
3387
|
|
|
return playlists |
3388
|
|
|
|
3389
|
|
|
def get_now_playing(self): |
3390
|
|
|
""" |
3391
|
|
|
Returns the currently playing track, or None if nothing is playing. |
3392
|
|
|
""" |
3393
|
|
|
|
3394
|
|
|
params = self._get_params() |
3395
|
|
|
params['limit'] = '1' |
3396
|
|
|
|
3397
|
|
|
doc = self._request(self.ws_prefix + '.getRecentTracks', False, params) |
3398
|
|
|
|
3399
|
|
|
tracks = doc.getElementsByTagName('track') |
3400
|
|
|
|
3401
|
|
|
if len(tracks) == 0: |
3402
|
|
|
return None |
3403
|
|
|
|
3404
|
|
|
e = tracks[0] |
3405
|
|
|
|
3406
|
|
|
if not e.hasAttribute('nowplaying'): |
3407
|
|
|
return None |
3408
|
|
|
|
3409
|
|
|
artist = _extract(e, 'artist') |
3410
|
|
|
title = _extract(e, 'name') |
3411
|
|
|
|
3412
|
|
|
return Track(artist, title, self.network, self.name) |
3413
|
|
|
|
3414
|
|
View Code Duplication |
    def get_recent_tracks(self, limit=10, cacheable=True,
                          time_from=None, time_to=None):
        """
        Returns this user's played track as a sequence of PlayedTrack objects
        in reverse order of playtime, all the way back to the first track.

        Parameters:
            limit : If None, it will try to pull all the available data.
            from (Optional) : Beginning timestamp of a range - only display
                scrobbles after this time, in UNIX timestamp format (integer
                number of seconds since 00:00:00, January 1st 1970 UTC). This
                must be in the UTC time zone.
            to (Optional) : End timestamp of a range - only display scrobbles
                before this time, in UNIX timestamp format (integer number of
                seconds since 00:00:00, January 1st 1970 UTC). This must be in
                the UTC time zone.

        This method uses caching. Enable caching only if you're pulling a
        large amount of data.

        Use extract_items() with the return of this function to
        get only a sequence of Track objects with no playback dates.
        """

        params = self._get_params()
        if limit:
            params['limit'] = limit
        if time_from:
            params['from'] = time_from
        if time_to:
            params['to'] = time_to

        seq = []
        for track in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getRecentTracks",
                cacheable,
                params):

            if track.hasAttribute('nowplaying'):
                continue  # to prevent the now playing track from sneaking in

            # The "uts" attribute holds the raw UNIX timestamp of the
            # scrobble, alongside the human-readable <date> text.
            title = _extract(track, "name")
            artist = _extract(track, "artist")
            date = _extract(track, "date")
            album = _extract(track, "album")
            timestamp = track.getElementsByTagName(
                "date")[0].getAttribute("uts")

            seq.append(PlayedTrack(
                Track(artist, title, self.network), album, date, timestamp))

        return seq
3468
|
|
|
|
3469
|
|
|
def get_id(self): |
3470
|
|
|
"""Returns the user ID.""" |
3471
|
|
|
|
3472
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3473
|
|
|
|
3474
|
|
|
return _extract(doc, "id") |
3475
|
|
|
|
3476
|
|
|
def get_language(self): |
3477
|
|
|
"""Returns the language code of the language used by the user.""" |
3478
|
|
|
|
3479
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3480
|
|
|
|
3481
|
|
|
return _extract(doc, "lang") |
3482
|
|
|
|
3483
|
|
|
def get_country(self): |
3484
|
|
|
"""Returns the name of the country of the user.""" |
3485
|
|
|
|
3486
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3487
|
|
|
|
3488
|
|
|
country = _extract(doc, "country") |
3489
|
|
|
|
3490
|
|
|
if country is None: |
3491
|
|
|
return None |
3492
|
|
|
else: |
3493
|
|
|
return Country(country, self.network) |
3494
|
|
|
|
3495
|
|
|
def get_age(self): |
3496
|
|
|
"""Returns the user's age.""" |
3497
|
|
|
|
3498
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3499
|
|
|
|
3500
|
|
|
return _number(_extract(doc, "age")) |
3501
|
|
|
|
3502
|
|
|
def get_gender(self): |
3503
|
|
|
"""Returns the user's gender. Either USER_MALE or USER_FEMALE.""" |
3504
|
|
|
|
3505
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3506
|
|
|
|
3507
|
|
|
value = _extract(doc, "gender") |
3508
|
|
|
|
3509
|
|
|
if value == 'm': |
3510
|
|
|
return USER_MALE |
3511
|
|
|
elif value == 'f': |
3512
|
|
|
return USER_FEMALE |
3513
|
|
|
|
3514
|
|
|
return None |
3515
|
|
|
|
3516
|
|
|
def is_subscriber(self): |
3517
|
|
|
"""Returns whether the user is a subscriber or not. True or False.""" |
3518
|
|
|
|
3519
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3520
|
|
|
|
3521
|
|
|
return _extract(doc, "subscriber") == "1" |
3522
|
|
|
|
3523
|
|
|
def get_playcount(self): |
3524
|
|
|
"""Returns the user's playcount so far.""" |
3525
|
|
|
|
3526
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3527
|
|
|
|
3528
|
|
|
return _number(_extract(doc, "playcount")) |
3529
|
|
|
|
3530
|
|
|
def get_registered(self): |
3531
|
|
|
"""Returns the user's registration date.""" |
3532
|
|
|
|
3533
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3534
|
|
|
|
3535
|
|
|
return _extract(doc, "registered") |
3536
|
|
|
|
3537
|
|
|
def get_unixtime_registered(self): |
3538
|
|
|
"""Returns the user's registration date as a UNIX timestamp.""" |
3539
|
|
|
|
3540
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3541
|
|
|
|
3542
|
|
|
return doc.getElementsByTagName( |
3543
|
|
|
"registered")[0].getAttribute("unixtime") |
3544
|
|
|
|
3545
|
|
|
def get_tagged_albums(self, tag, limit=None, cacheable=True): |
3546
|
|
|
"""Returns the albums tagged by a user.""" |
3547
|
|
|
|
3548
|
|
|
params = self._get_params() |
3549
|
|
|
params['tag'] = tag |
3550
|
|
|
params['taggingtype'] = 'album' |
3551
|
|
|
if limit: |
3552
|
|
|
params['limit'] = limit |
3553
|
|
|
doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable, |
3554
|
|
|
params) |
3555
|
|
|
return _extract_albums(doc, self.network) |
3556
|
|
|
|
3557
|
|
|
def get_tagged_artists(self, tag, limit=None): |
3558
|
|
|
"""Returns the artists tagged by a user.""" |
3559
|
|
|
|
3560
|
|
|
params = self._get_params() |
3561
|
|
|
params['tag'] = tag |
3562
|
|
|
params['taggingtype'] = 'artist' |
3563
|
|
|
if limit: |
3564
|
|
|
params["limit"] = limit |
3565
|
|
|
doc = self._request(self.ws_prefix + '.getpersonaltags', True, params) |
3566
|
|
|
return _extract_artists(doc, self.network) |
3567
|
|
|
|
3568
|
|
|
def get_tagged_tracks(self, tag, limit=None, cacheable=True): |
3569
|
|
|
"""Returns the tracks tagged by a user.""" |
3570
|
|
|
|
3571
|
|
|
params = self._get_params() |
3572
|
|
|
params['tag'] = tag |
3573
|
|
|
params['taggingtype'] = 'track' |
3574
|
|
|
if limit: |
3575
|
|
|
params['limit'] = limit |
3576
|
|
|
doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable, |
3577
|
|
|
params) |
3578
|
|
|
return _extract_tracks(doc, self.network) |
3579
|
|
|
|
3580
|
|
|
def get_top_albums( |
3581
|
|
|
self, period=PERIOD_OVERALL, limit=None, cacheable=True): |
3582
|
|
|
"""Returns the top albums played by a user. |
3583
|
|
|
* period: The period of time. Possible values: |
3584
|
|
|
o PERIOD_OVERALL |
3585
|
|
|
o PERIOD_7DAYS |
3586
|
|
|
o PERIOD_1MONTH |
3587
|
|
|
o PERIOD_3MONTHS |
3588
|
|
|
o PERIOD_6MONTHS |
3589
|
|
|
o PERIOD_12MONTHS |
3590
|
|
|
""" |
3591
|
|
|
|
3592
|
|
|
params = self._get_params() |
3593
|
|
|
params['period'] = period |
3594
|
|
|
if limit: |
3595
|
|
|
params['limit'] = limit |
3596
|
|
|
|
3597
|
|
|
doc = self._request( |
3598
|
|
|
self.ws_prefix + '.getTopAlbums', cacheable, params) |
3599
|
|
|
|
3600
|
|
|
return _extract_top_albums(doc, self.network) |
3601
|
|
|
|
3602
|
|
|
def get_top_artists(self, period=PERIOD_OVERALL, limit=None): |
3603
|
|
|
"""Returns the top artists played by a user. |
3604
|
|
|
* period: The period of time. Possible values: |
3605
|
|
|
o PERIOD_OVERALL |
3606
|
|
|
o PERIOD_7DAYS |
3607
|
|
|
o PERIOD_1MONTH |
3608
|
|
|
o PERIOD_3MONTHS |
3609
|
|
|
o PERIOD_6MONTHS |
3610
|
|
|
o PERIOD_12MONTHS |
3611
|
|
|
""" |
3612
|
|
|
|
3613
|
|
|
params = self._get_params() |
3614
|
|
|
params['period'] = period |
3615
|
|
|
if limit: |
3616
|
|
|
params["limit"] = limit |
3617
|
|
|
|
3618
|
|
|
doc = self._request(self.ws_prefix + '.getTopArtists', True, params) |
3619
|
|
|
|
3620
|
|
|
return _extract_top_artists(doc, self.network) |
3621
|
|
|
|
3622
|
|
|
def get_top_tags(self, limit=None, cacheable=True): |
3623
|
|
|
""" |
3624
|
|
|
Returns a sequence of the top tags used by this user with their counts |
3625
|
|
|
as TopItem objects. |
3626
|
|
|
* limit: The limit of how many tags to return. |
3627
|
|
|
* cacheable: Whether to cache results. |
3628
|
|
|
""" |
3629
|
|
|
|
3630
|
|
|
params = self._get_params() |
3631
|
|
|
if limit: |
3632
|
|
|
params["limit"] = limit |
3633
|
|
|
|
3634
|
|
|
doc = self._request(self.ws_prefix + ".getTopTags", cacheable, params) |
3635
|
|
|
|
3636
|
|
|
seq = [] |
3637
|
|
|
for node in doc.getElementsByTagName("tag"): |
3638
|
|
|
seq.append(TopItem( |
3639
|
|
|
Tag(_extract(node, "name"), self.network), |
3640
|
|
|
_extract(node, "count"))) |
3641
|
|
|
|
3642
|
|
|
return seq |
3643
|
|
|
|
3644
|
|
|
def get_top_tracks( |
3645
|
|
|
self, period=PERIOD_OVERALL, limit=None, cacheable=True): |
3646
|
|
|
"""Returns the top tracks played by a user. |
3647
|
|
|
* period: The period of time. Possible values: |
3648
|
|
|
o PERIOD_OVERALL |
3649
|
|
|
o PERIOD_7DAYS |
3650
|
|
|
o PERIOD_1MONTH |
3651
|
|
|
o PERIOD_3MONTHS |
3652
|
|
|
o PERIOD_6MONTHS |
3653
|
|
|
o PERIOD_12MONTHS |
3654
|
|
|
""" |
3655
|
|
|
|
3656
|
|
|
params = self._get_params() |
3657
|
|
|
params['period'] = period |
3658
|
|
|
if limit: |
3659
|
|
|
params['limit'] = limit |
3660
|
|
|
|
3661
|
|
|
return self._get_things( |
3662
|
|
|
"getTopTracks", "track", Track, params, cacheable) |
3663
|
|
|
|
3664
|
|
|
def compare_with_user(self, user, shared_artists_limit=None): |
3665
|
|
|
""" |
3666
|
|
|
Compare this user with another Last.fm user. |
3667
|
|
|
Returns a sequence: |
3668
|
|
|
(tasteometer_score, (shared_artist1, shared_artist2, ...)) |
3669
|
|
|
user: A User object or a username string/unicode object. |
3670
|
|
|
""" |
3671
|
|
|
|
3672
|
|
|
if isinstance(user, User): |
3673
|
|
|
user = user.get_name() |
3674
|
|
|
|
3675
|
|
|
params = self._get_params() |
3676
|
|
|
if shared_artists_limit: |
3677
|
|
|
params['limit'] = shared_artists_limit |
3678
|
|
|
params['type1'] = 'user' |
3679
|
|
|
params['type2'] = 'user' |
3680
|
|
|
params['value1'] = self.get_name() |
3681
|
|
|
params['value2'] = user |
3682
|
|
|
|
3683
|
|
|
doc = self._request('tasteometer.compare', False, params) |
3684
|
|
|
|
3685
|
|
|
score = _extract(doc, 'score') |
3686
|
|
|
|
3687
|
|
|
artists = doc.getElementsByTagName('artists')[0] |
3688
|
|
|
shared_artists_names = _extract_all(artists, 'name') |
3689
|
|
|
|
3690
|
|
|
shared_artists_seq = [] |
3691
|
|
|
|
3692
|
|
|
for name in shared_artists_names: |
3693
|
|
|
shared_artists_seq.append(Artist(name, self.network)) |
3694
|
|
|
|
3695
|
|
|
return (score, shared_artists_seq) |
3696
|
|
|
|
3697
|
|
|
def get_image(self): |
3698
|
|
|
"""Returns the user's avatar.""" |
3699
|
|
|
|
3700
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3701
|
|
|
|
3702
|
|
|
return _extract(doc, "image") |
3703
|
|
|
|
3704
|
|
|
def get_url(self, domain_name=DOMAIN_ENGLISH): |
3705
|
|
|
"""Returns the url of the user page on the network. |
3706
|
|
|
* domain_name: The network's language domain. Possible values: |
3707
|
|
|
o DOMAIN_ENGLISH |
3708
|
|
|
o DOMAIN_GERMAN |
3709
|
|
|
o DOMAIN_SPANISH |
3710
|
|
|
o DOMAIN_FRENCH |
3711
|
|
|
o DOMAIN_ITALIAN |
3712
|
|
|
o DOMAIN_POLISH |
3713
|
|
|
o DOMAIN_PORTUGUESE |
3714
|
|
|
o DOMAIN_SWEDISH |
3715
|
|
|
o DOMAIN_TURKISH |
3716
|
|
|
o DOMAIN_RUSSIAN |
3717
|
|
|
o DOMAIN_JAPANESE |
3718
|
|
|
o DOMAIN_CHINESE |
3719
|
|
|
""" |
3720
|
|
|
|
3721
|
|
|
name = _url_safe(self.get_name()) |
3722
|
|
|
|
3723
|
|
|
return self.network._get_url(domain_name, "user") % {'name': name} |
3724
|
|
|
|
3725
|
|
|
def get_library(self): |
3726
|
|
|
"""Returns the associated Library object. """ |
3727
|
|
|
|
3728
|
|
|
return Library(self, self.network) |
3729
|
|
|
|
3730
|
|
|
def shout(self, message): |
3731
|
|
|
""" |
3732
|
|
|
Post a shout |
3733
|
|
|
""" |
3734
|
|
|
|
3735
|
|
|
params = self._get_params() |
3736
|
|
|
params["message"] = message |
3737
|
|
|
|
3738
|
|
|
self._request(self.ws_prefix + ".Shout", False, params) |
3739
|
|
|
|
3740
|
|
|
|
3741
|
|
|
class AuthenticatedUser(User):
    """The user whose credentials authenticate the current session."""

    def __init__(self, network):
        # The name is resolved lazily by get_name(), so start empty.
        User.__init__(self, "", network)

    def _get_params(self):
        return {"user": self.get_name()}

    def get_name(self):
        """Returns the name of the authenticated user."""
        doc = self._request("user.getInfo", True, {"user": ""})  # hack
        self.name = _extract(doc, "name")
        return self.name

    def get_recommended_events(self, limit=50, cacheable=False):
        """
        Returns a sequence of Event objects
        if limit==None it will return all
        """
        return [
            Event(_extract(node, "id"), self.network)
            for node in _collect_nodes(
                limit, self, "user.getRecommendedEvents", cacheable)]

    def get_recommended_artists(self, limit=50, cacheable=False):
        """
        Returns a sequence of Artist objects
        if limit==None it will return all
        """
        return [
            Artist(_extract(node, "name"), self.network)
            for node in _collect_nodes(
                limit, self, "user.getRecommendedArtists", cacheable)]
3781
|
|
|
|
3782
|
|
|
|
3783
|
|
|
class _Search(_BaseObject):
    """An abstract class. Use one of its derivatives."""

    def __init__(self, ws_prefix, search_terms, network):
        _BaseObject.__init__(self, network, ws_prefix)

        self._ws_prefix = ws_prefix
        self.search_terms = search_terms

        # Index of the last page fetched; incremented before each fetch.
        self._last_page_index = 0

    def _get_params(self):
        # Hand out a copy so callers can add request-specific keys
        # without mutating the stored search terms.
        return dict(self.search_terms)

    def get_total_result_count(self):
        """Returns the total count of all the results."""
        doc = self._request(self._ws_prefix + ".search", True)
        return _extract(doc, "opensearch:totalResults")

    def _retrieve_page(self, page_index):
        """Returns the node of matches to be processed"""
        params = self._get_params()
        params["page"] = str(page_index)
        doc = self._request(self._ws_prefix + ".search", True, params)
        return doc.getElementsByTagName(self._ws_prefix + "matches")[0]

    def _retrieve_next_page(self):
        self._last_page_index += 1
        return self._retrieve_page(self._last_page_index)
3821
|
|
|
|
3822
|
|
|
|
3823
|
|
|
class AlbumSearch(_Search):
    """Search for an album by name."""

    def __init__(self, album_name, network):
        _Search.__init__(self, "album", {"album": album_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Album objects."""
        matches = self._retrieve_next_page()
        return [
            Album(
                _extract(node, "artist"),
                _extract(node, "name"),
                self.network)
            for node in matches.getElementsByTagName("album")]
3843
|
|
|
|
3844
|
|
|
|
3845
|
|
|
class ArtistSearch(_Search):
    """Search for an artist by artist name."""

    def __init__(self, artist_name, network):
        _Search.__init__(self, "artist", {"artist": artist_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Artist objects."""
        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("artist"):
            found = Artist(_extract(node, "name"), self.network)
            # Attach the listener count reported alongside each match.
            found.listener_count = _number(_extract(node, "listeners"))
            results.append(found)
        return results
3863
|
|
|
|
3864
|
|
|
|
3865
|
|
|
class TagSearch(_Search):
    """Search for a tag by tag name."""

    def __init__(self, tag_name, network):
        _Search.__init__(self, "tag", {"tag": tag_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Tag objects."""
        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("tag"):
            found = Tag(_extract(node, "name"), self.network)
            # Attach the usage count reported alongside each match.
            found.tag_count = _number(_extract(node, "count"))
            results.append(found)
        return results
3884
|
|
|
|
3885
|
|
|
|
3886
|
|
|
class TrackSearch(_Search):
    """
    Search for a track by track title. If you don't want to narrow the results
    down by specifying the artist name, set it to empty string.
    """

    def __init__(self, artist_name, track_title, network):
        _Search.__init__(
            self,
            "track",
            {"track": track_title, "artist": artist_name},
            network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Track objects."""
        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("track"):
            found = Track(
                _extract(node, "artist"),
                _extract(node, "name"),
                self.network)
            # Attach the listener count reported alongside each match.
            found.listener_count = _number(_extract(node, "listeners"))
            results.append(found)
        return results
3915
|
|
|
|
3916
|
|
|
|
3917
|
|
|
class VenueSearch(_Search):
    """
    Search for a venue by its name. If you don't want to narrow the results
    down by specifying a country, set it to empty string.
    """

    def __init__(self, venue_name, country_name, network):
        _Search.__init__(
            self,
            "venue",
            {"venue": venue_name, "country": country_name},
            network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Venue objects."""
        matches = self._retrieve_next_page()
        return [
            Venue(_extract(node, "id"), self.network)
            for node in matches.getElementsByTagName("venue")]
3941
|
|
|
|
3942
|
|
|
|
3943
|
|
|
class Venue(_BaseObject):
    """A venue where events are held."""

    # TODO: waiting for a venue.getInfo web service to use.
    # TODO: As an intermediate use case, can pass the venue DOM element when
    # using Event.get_venue() to populate the venue info, if the venue.getInfo
    # API call becomes available this workaround should be removed

    id = None
    info = None
    name = None
    location = None
    url = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, netword_id, network, venue_element=None):
        _BaseObject.__init__(self, network, "venue")

        self.id = _number(netword_id)
        if venue_element is not None:
            # Populate venue details from the supplied DOM element,
            # since there is no venue.getInfo call to fetch them later.
            self.info = _extract_element_tree(venue_element)
            self.name = self.info.get('name')
            self.url = self.info.get('url')
            self.location = self.info.get('location')

    def __repr__(self):
        return "pylast.Venue(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Venue #" + str(self.id)

    def __eq__(self, other):
        # NOTE(review): assumes `other` is also a Venue (has get_id()).
        return self.get_id() == other.get_id()

    def _get_params(self):
        return {self.ws_prefix: self.get_id()}

    def get_id(self):
        """Returns the id of the venue."""
        return self.id

    def get_name(self):
        """Returns the name of the venue."""
        return self.name

    def get_url(self):
        """Returns the URL of the venue page."""
        return self.url

    def get_location(self):
        """Returns the location of the venue (dictionary)."""
        return self.location

    def get_upcoming_events(self):
        """Returns the upcoming events in this venue."""
        doc = self._request(self.ws_prefix + ".getEvents", True)
        return _extract_events_from_doc(doc, self.network)

    def get_past_events(self):
        """Returns the past events held in this venue."""
        # NOTE(review): issues the same venue.getEvents request as
        # get_upcoming_events; the call does not distinguish past events.
        doc = self._request(self.ws_prefix + ".getEvents", True)
        return _extract_events_from_doc(doc, self.network)
4015
|
|
|
|
4016
|
|
|
|
4017
|
|
|
def md5(text):
    """Returns the md5 hash of a string."""
    # Normalise to unicode first, then hash the utf-8 encoding.
    return hashlib.md5(_unicode(text).encode("utf-8")).hexdigest()
4024
|
|
|
|
4025
|
|
|
|
4026
|
|
|
def _unicode(text):
    """Coerce *text* to the platform's unicode string type."""
    if isinstance(text, six.text_type):
        return text
    if isinstance(text, six.binary_type):
        # Bytes are assumed to be utf-8 encoded.
        return six.text_type(text, "utf-8")
    return six.text_type(text)
4033
|
|
|
|
4034
|
|
|
|
4035
|
|
|
def _string(string):
    """For Python2 routines that can only process str type."""
    if isinstance(string, str):
        return string
    converted = six.text_type(string)
    # On Python 2 the native str is bytes, so encode the unicode result.
    if sys.version_info[0] == 2:
        converted = converted.encode("utf-8")
    return converted
4043
|
|
|
|
4044
|
|
|
|
4045
|
|
|
def cleanup_nodes(doc):
    """
    Remove text nodes containing only whitespace from the document root.

    Iterates over a snapshot of the child list: removing entries from a
    live minidom NodeList while iterating it can skip the sibling that
    follows each removed node.
    """
    for node in list(doc.documentElement.childNodes):
        if node.nodeType == Node.TEXT_NODE and node.nodeValue.isspace():
            doc.documentElement.removeChild(node)
    return doc
4053
|
|
|
|
4054
|
|
|
|
4055
|
|
|
def _collect_nodes(limit, sender, method_name, cacheable, params=None):
    """
    Returns a sequence of dom.Node objects about as close to limit as possible

    Pages through the web-service results until either the last page is
    reached or `limit` element nodes have been gathered; a falsy `limit`
    collects everything.

    Raises Exception when the response carries no total-pages attribute.
    """
    if not params:
        params = sender._get_params()

    nodes = []
    page = 1
    end_of_pages = False

    # A falsy limit means "no limit" (removed a redundant `limit and`
    # clause from this condition; behaviour unchanged).
    while not end_of_pages and (not limit or len(nodes) < limit):
        params["page"] = str(page)
        doc = sender._request(method_name, cacheable, params)
        doc = cleanup_nodes(doc)

        main = doc.documentElement.childNodes[0]

        # The attribute's capitalisation varies between API methods.
        if main.hasAttribute("totalPages"):
            total_pages = _number(main.getAttribute("totalPages"))
        elif main.hasAttribute("totalpages"):
            total_pages = _number(main.getAttribute("totalpages"))
        else:
            raise Exception("No total pages attribute")

        for node in main.childNodes:
            # Skip whitespace/text nodes and stop appending at the limit.
            if not node.nodeType == xml.dom.Node.TEXT_NODE and (
                    not limit or (len(nodes) < limit)):
                nodes.append(node)

        if page >= total_pages:
            end_of_pages = True

        page += 1

    return nodes
4092
|
|
|
|
4093
|
|
|
|
4094
|
|
|
def _extract(node, name, index=0):
    """Extracts a value from the xml string"""
    matches = node.getElementsByTagName(name)
    if not matches:
        # No such tag: mirror the implicit-None of a missing element.
        return None
    first_child = matches[index].firstChild
    if first_child:
        return _unescape_htmlentity(first_child.data.strip())
    return None
4104
|
|
|
|
4105
|
|
|
|
4106
|
|
|
def _extract_element_tree(node):
    """Extract an element tree into a multi-level dictionary

    NB: If any elements have text nodes as well as nested
    elements this will ignore the text nodes"""

    def _has_child_elements(root):
        """Check if an element has any nested (child) elements"""
        return any(
            child.nodeType == child.ELEMENT_NODE
            for child in root.childNodes)

    def _recurse_build_tree(root, target):
        """Recursively build a multi-level dict"""
        for child in root.childNodes:
            if child.nodeType != child.ELEMENT_NODE:
                continue
            if _has_child_elements(child):
                # Nested elements become a nested dict.
                target[child.tagName] = {}
                _recurse_build_tree(child, target[child.tagName])
            elif child.firstChild is None:
                target[child.tagName] = None
            else:
                target[child.tagName] = _unescape_htmlentity(
                    child.firstChild.data.strip())
        return target

    return _recurse_build_tree(node, {})
4135
|
|
|
|
4136
|
|
|
|
4137
|
|
|
def _extract_all(node, name, limit_count=None):
    """Extracts all the values from the xml string. returning a list."""
    values = []
    for position in range(len(node.getElementsByTagName(name))):
        # Stop once the requested number of values has been gathered.
        if len(values) == limit_count:
            break
        values.append(_extract(node, name, position))
    return values
4149
|
|
|
|
4150
|
|
|
|
4151
|
|
|
def _extract_top_artists(doc, network):
    # TODO Maybe include the _request here too?
    return [
        TopItem(
            Artist(_extract(node, "name"), network),
            _extract(node, "playcount"))
        for node in doc.getElementsByTagName("artist")]
4161
|
|
|
|
4162
|
|
|
|
4163
|
|
|
def _extract_top_albums(doc, network):
    # TODO Maybe include the _request here too?
    # Each <album> carries two <name> tags: index 0 is the album title,
    # index 1 the artist name.
    return [
        TopItem(
            Album(_extract(node, "name", 1), _extract(node, "name"), network),
            _extract(node, "playcount"))
        for node in doc.getElementsByTagName("album")]
4174
|
|
|
|
4175
|
|
|
|
4176
|
|
|
def _extract_artists(doc, network):
    return [
        Artist(_extract(node, "name"), network)
        for node in doc.getElementsByTagName("artist")]
4181
|
|
|
|
4182
|
|
|
|
4183
|
|
|
def _extract_albums(doc, network):
    # Each <album> carries two <name> tags: index 0 is the album title,
    # index 1 the artist name.
    return [
        Album(_extract(node, "name", 1), _extract(node, "name"), network)
        for node in doc.getElementsByTagName("album")]
4190
|
|
|
|
4191
|
|
|
|
4192
|
|
|
def _extract_tracks(doc, network):
    # Each <track> carries two <name> tags: index 0 is the track title,
    # index 1 the artist name.
    return [
        Track(_extract(node, "name", 1), _extract(node, "name"), network)
        for node in doc.getElementsByTagName("track")]
4199
|
|
|
|
4200
|
|
|
|
4201
|
|
|
def _extract_events_from_doc(doc, network):
    return [
        Event(_extract(node, "id"), network)
        for node in doc.getElementsByTagName("event")]
4206
|
|
|
|
4207
|
|
|
|
4208
|
|
|
def _url_safe(text):
    """Does all kinds of tricks on a text to make it safe to use in a url."""
    # Double-quoted and lowercased, matching the site's URL scheme.
    quoted_once = url_quote_plus(_string(text))
    return url_quote_plus(quoted_once).lower()
4212
|
|
|
|
4213
|
|
|
|
4214
|
|
|
def _number(string): |
4215
|
|
|
""" |
4216
|
|
|
Extracts an int from a string. |
4217
|
|
|
Returns a 0 if None or an empty string was passed. |
4218
|
|
|
""" |
4219
|
|
|
|
4220
|
|
|
if not string: |
4221
|
|
|
return 0 |
4222
|
|
|
elif string == "": |
4223
|
|
|
return 0 |
4224
|
|
|
else: |
4225
|
|
|
try: |
4226
|
|
|
return int(string) |
4227
|
|
|
except ValueError: |
4228
|
|
|
return float(string) |
4229
|
|
|
|
4230
|
|
|
|
4231
|
|
|
def _unescape_htmlentity(string):
    """
    Replace named HTML entities (e.g. "&amp;") in *string* with their
    unicode characters.

    Single regex pass: the previous loop ran one str.replace per known
    entity (~2000 passes per call) and could double-unescape text such
    as "&amp;lt;" depending on dict iteration order.
    """
    mapping = htmlentitydefs.name2codepoint

    def _substitute(match):
        entity_name = match.group(1)
        if entity_name in mapping:
            return unichr(mapping[entity_name])
        return match.group(0)  # unknown entity: leave it untouched

    return re.sub(r"&([A-Za-z][A-Za-z0-9]*);", _substitute, string)
4240
|
|
|
|
4241
|
|
|
|
4242
|
|
|
def extract_items(topitems_or_libraryitems):
    """
    Extracts a sequence of items from a sequence of TopItem or
    LibraryItem objects.
    """
    return [wrapper.item for wrapper in topitems_or_libraryitems]
4253
|
|
|
|
4254
|
|
|
|
4255
|
|
|
class ScrobblingError(Exception):
    """Base error raised by the (legacy) scrobbling protocol."""

    def __init__(self, message):
        Exception.__init__(self)
        # Kept as an attribute so __str__ can render it.
        self.message = message

    @_string_output
    def __str__(self):
        return self.message
4263
|
|
|
|
4264
|
|
|
|
4265
|
|
|
class BannedClientError(ScrobblingError):
    """Raised when this client version has been banned by the server."""

    def __init__(self):
        ScrobblingError.__init__(
            self, "This version of the client has been banned")
4270
|
|
|
|
4271
|
|
|
class BadAuthenticationError(ScrobblingError):
    """Raised when the server rejects the authentication token."""

    def __init__(self):
        ScrobblingError.__init__(self, "Bad authentication token")
4274
|
|
|
|
4275
|
|
|
|
4276
|
|
|
class BadTimeError(ScrobblingError):
    """Raised when the submitted timestamp is too far from server time."""

    def __init__(self):
        ScrobblingError.__init__(
            self, "Time provided is not close enough to current time")
4280
|
|
|
|
4281
|
|
|
|
4282
|
|
|
class BadSessionError(ScrobblingError):
    """Raised when the scrobbling session id is no longer valid."""

    def __init__(self):
        ScrobblingError.__init__(
            self, "Bad session id, consider re-handshaking")
4286
|
|
|
|
4287
|
|
|
|
4288
|
|
|
class _ScrobblerRequest(object):
    """A single HTTP request in the legacy audioscrobbler protocol."""

    def __init__(self, url, params, network, request_type="POST"):
        # The protocol transmits every value as a string.
        for key in params:
            params[key] = str(params[key])

        self.params = params
        self.type = request_type
        (self.hostname, self.subdir) = url_split_host(url[len("http:"):])
        self.network = network

    def execute(self):
        """Returns a string response of this request."""
        connection = HTTPConnection(self.hostname)

        # Hand-build the form-encoded request body.
        encoded_pairs = [
            '='.join((name, url_quote_plus(self.params[name])))
            for name in self.params.keys()]
        body = "&".join(encoded_pairs)

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept-Charset": "utf-8",
            "User-Agent": "pylast" + "/" + __version__,
            "HOST": self.hostname
        }

        if self.type == "GET":
            connection.request(
                "GET", self.subdir + "?" + body, headers=headers)
        else:
            connection.request("POST", self.subdir, body, headers)
        response = _unicode(connection.getresponse().read())

        self._check_response_for_errors(response)

        return response

    def _check_response_for_errors(self, response):
        """
        When passed a string response it checks for errors, raising any
        exceptions as necessary.
        """
        # The protocol's status is the first line of the body.
        status_line = response.split("\n")[0]

        if status_line == "OK":
            return
        if status_line == "BANNED":
            raise BannedClientError()
        if status_line == "BADAUTH":
            raise BadAuthenticationError()
        if status_line == "BADTIME":
            raise BadTimeError()
        if status_line == "BADSESSION":
            raise BadSessionError()
        if status_line.startswith("FAILED "):
            reason = status_line[status_line.find("FAILED ") + len("FAILED "):]
            raise ScrobblingError(reason)
4351
|
|
|
|
4352
|
|
|
|
4353
|
|
|
class Scrobbler(object): |
4354
|
|
|
"""A class for scrobbling tracks to Last.fm""" |
4355
|
|
|
|
4356
|
|
|
session_id = None |
4357
|
|
|
nowplaying_url = None |
4358
|
|
|
submissions_url = None |
4359
|
|
|
|
4360
|
|
|
    def __init__(self, network, client_id, client_version):
        # Client identification sent during the protocol handshake.
        self.client_id = client_id
        self.client_version = client_version
        # Credentials copied from the network object; password_hash is
        # combined with a timestamp in _do_handshake to build the token.
        self.username = network.username
        self.password = network.password_hash
        self.network = network
4367
|
|
|
def _do_handshake(self): |
4368
|
|
|
"""Handshakes with the server""" |
4369
|
|
|
|
4370
|
|
|
timestamp = str(int(time.time())) |
4371
|
|
|
|
4372
|
|
|
if self.password and self.username: |
4373
|
|
|
token = md5(self.password + timestamp) |
4374
|
|
|
elif self.network.api_key and self.network.api_secret and \ |
4375
|
|
|
self.network.session_key: |
4376
|
|
|
if not self.username: |
4377
|
|
|
self.username = self.network.get_authenticated_user()\ |
4378
|
|
|
.get_name() |
4379
|
|
|
token = md5(self.network.api_secret + timestamp) |
4380
|
|
|
|
4381
|
|
|
params = { |
4382
|
|
|
"hs": "true", "p": "1.2.1", "c": self.client_id, |
4383
|
|
|
"v": self.client_version, "u": self.username, "t": timestamp, |
4384
|
|
|
"a": token} |
4385
|
|
|
|
4386
|
|
|
if self.network.session_key and self.network.api_key: |
4387
|
|
|
params["sk"] = self.network.session_key |
4388
|
|
|
params["api_key"] = self.network.api_key |
4389
|
|
|
|
4390
|
|
|
server = self.network.submission_server |
4391
|
|
|
response = _ScrobblerRequest( |
4392
|
|
|
server, params, self.network, "GET").execute().split("\n") |
4393
|
|
|
|
4394
|
|
|
self.session_id = response[1] |
4395
|
|
|
self.nowplaying_url = response[2] |
4396
|
|
|
self.submissions_url = response[3] |
4397
|
|
|
|
4398
|
|
|
def _get_session_id(self, new=False): |
4399
|
|
|
""" |
4400
|
|
|
Returns a handshake. If new is true, then it will be requested from |
4401
|
|
|
the server even if one was cached. |
4402
|
|
|
""" |
4403
|
|
|
|
4404
|
|
|
if not self.session_id or new: |
4405
|
|
|
self._do_handshake() |
4406
|
|
|
|
4407
|
|
|
return self.session_id |
4408
|
|
|
|
4409
|
|
|
def report_now_playing( |
4410
|
|
|
self, artist, title, album="", duration="", track_number="", |
4411
|
|
|
mbid=""): |
4412
|
|
|
|
4413
|
|
|
_deprecation_warning( |
4414
|
|
|
"DeprecationWarning: Use Network.update_now_playing(...) instead") |
4415
|
|
|
|
4416
|
|
|
params = { |
4417
|
|
|
"s": self._get_session_id(), "a": artist, "t": title, |
4418
|
|
|
"b": album, "l": duration, "n": track_number, "m": mbid} |
4419
|
|
|
|
4420
|
|
|
try: |
4421
|
|
|
_ScrobblerRequest( |
4422
|
|
|
self.nowplaying_url, params, self.network |
4423
|
|
|
).execute() |
4424
|
|
|
except BadSessionError: |
4425
|
|
|
self._do_handshake() |
4426
|
|
|
self.report_now_playing( |
4427
|
|
|
artist, title, album, duration, track_number, mbid) |
4428
|
|
|
|
4429
|
|
|
def scrobble( |
4430
|
|
|
self, artist, title, time_started, source, mode, duration, |
4431
|
|
|
album="", track_number="", mbid=""): |
4432
|
|
|
"""Scrobble a track. parameters: |
4433
|
|
|
artist: Artist name. |
4434
|
|
|
title: Track title. |
4435
|
|
|
time_started: UTC timestamp of when the track started playing. |
4436
|
|
|
source: The source of the track |
4437
|
|
|
SCROBBLE_SOURCE_USER: Chosen by the user |
4438
|
|
|
(the most common value, unless you have a reason for |
4439
|
|
|
choosing otherwise, use this). |
4440
|
|
|
SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST: Non-personalised |
4441
|
|
|
broadcast (e.g. Shoutcast, BBC Radio 1). |
4442
|
|
|
SCROBBLE_SOURCE_PERSONALIZED_BROADCAST: Personalised |
4443
|
|
|
recommendation except Last.fm (e.g. Pandora, Launchcast). |
4444
|
|
|
SCROBBLE_SOURCE_LASTFM: ast.fm (any mode). In this case, the |
4445
|
|
|
5-digit recommendation_key value must be set. |
4446
|
|
|
SCROBBLE_SOURCE_UNKNOWN: Source unknown. |
4447
|
|
|
mode: The submission mode |
4448
|
|
|
SCROBBLE_MODE_PLAYED: The track was played. |
4449
|
|
|
SCROBBLE_MODE_LOVED: The user manually loved the track |
4450
|
|
|
(implies a listen) |
4451
|
|
|
SCROBBLE_MODE_SKIPPED: The track was skipped |
4452
|
|
|
(Only if source was Last.fm) |
4453
|
|
|
SCROBBLE_MODE_BANNED: The track was banned |
4454
|
|
|
(Only if source was Last.fm) |
4455
|
|
|
duration: Track duration in seconds. |
4456
|
|
|
album: The album name. |
4457
|
|
|
track_number: The track number on the album. |
4458
|
|
|
mbid: MusicBrainz ID. |
4459
|
|
|
""" |
4460
|
|
|
|
4461
|
|
|
_deprecation_warning( |
4462
|
|
|
"DeprecationWarning: Use Network.scrobble(...) instead") |
4463
|
|
|
|
4464
|
|
|
params = { |
4465
|
|
|
"s": self._get_session_id(), |
4466
|
|
|
"a[0]": _string(artist), |
4467
|
|
|
"t[0]": _string(title), |
4468
|
|
|
"i[0]": str(time_started), |
4469
|
|
|
"o[0]": source, |
4470
|
|
|
"r[0]": mode, |
4471
|
|
|
"l[0]": str(duration), |
4472
|
|
|
"b[0]": _string(album), |
4473
|
|
|
"n[0]": track_number, |
4474
|
|
|
"m[0]": mbid |
4475
|
|
|
} |
4476
|
|
|
|
4477
|
|
|
_ScrobblerRequest(self.submissions_url, params, self.network).execute() |
4478
|
|
|
|
4479
|
|
|
def scrobble_many(self, tracks): |
4480
|
|
|
""" |
4481
|
|
|
Scrobble several tracks at once. |
4482
|
|
|
|
4483
|
|
|
tracks: A sequence of a sequence of parameters for each track. |
4484
|
|
|
The order of parameters is the same as if passed to the |
4485
|
|
|
scrobble() method. |
4486
|
|
|
""" |
4487
|
|
|
|
4488
|
|
|
_deprecation_warning( |
4489
|
|
|
"DeprecationWarning: Use Network.scrobble_many(...) instead") |
4490
|
|
|
|
4491
|
|
|
remainder = [] |
4492
|
|
|
|
4493
|
|
|
if len(tracks) > 50: |
4494
|
|
|
remainder = tracks[50:] |
4495
|
|
|
tracks = tracks[:50] |
4496
|
|
|
|
4497
|
|
|
params = {"s": self._get_session_id()} |
4498
|
|
|
|
4499
|
|
|
i = 0 |
4500
|
|
|
for t in tracks: |
4501
|
|
|
_pad_list(t, 9, "") |
4502
|
|
|
params["a[%s]" % str(i)] = _string(t[0]) |
4503
|
|
|
params["t[%s]" % str(i)] = _string(t[1]) |
4504
|
|
|
params["i[%s]" % str(i)] = str(t[2]) |
4505
|
|
|
params["o[%s]" % str(i)] = t[3] |
4506
|
|
|
params["r[%s]" % str(i)] = t[4] |
4507
|
|
|
params["l[%s]" % str(i)] = str(t[5]) |
4508
|
|
|
params["b[%s]" % str(i)] = _string(t[6]) |
4509
|
|
|
params["n[%s]" % str(i)] = t[7] |
4510
|
|
|
params["m[%s]" % str(i)] = t[8] |
4511
|
|
|
|
4512
|
|
|
i += 1 |
4513
|
|
|
|
4514
|
|
|
_ScrobblerRequest(self.submissions_url, params, self.network).execute() |
4515
|
|
|
|
4516
|
|
|
if remainder: |
4517
|
|
|
self.scrobble_many(remainder) |
4518
|
|
|
|
4519
|
|
|
# End of file |
4520
|
|
|
|