1
|
|
|
# -*- coding: utf-8 -*- |
2
|
|
|
# |
3
|
|
|
# pylast - |
4
|
|
|
# A Python interface to Last.fm and Libre.fm |
5
|
|
|
# |
6
|
|
|
# Copyright 2008-2010 Amr Hassan |
7
|
|
|
# Copyright 2013-2016 hugovk |
8
|
|
|
# |
9
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
10
|
|
|
# you may not use this file except in compliance with the License. |
11
|
|
|
# You may obtain a copy of the License at |
12
|
|
|
# |
13
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
14
|
|
|
# |
15
|
|
|
# Unless required by applicable law or agreed to in writing, software |
16
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
17
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
18
|
|
|
# See the License for the specific language governing permissions and |
19
|
|
|
# limitations under the License. |
20
|
|
|
# |
21
|
|
|
# https://github.com/pylast/pylast |
22
|
|
|
|
23
|
1 |
|
import hashlib |
24
|
1 |
|
from xml.dom import minidom, Node |
25
|
1 |
|
import xml.dom |
26
|
1 |
|
import time |
27
|
1 |
|
import shelve |
28
|
1 |
|
import tempfile |
29
|
1 |
|
import sys |
30
|
1 |
|
import collections |
31
|
1 |
|
import warnings |
32
|
1 |
|
import re |
33
|
1 |
|
import six |
34
|
|
|
|
35
|
1 |
|
# Package metadata consumed by setup tooling and exposed to callers.
__version__ = '1.6.0'
__author__ = 'Amr Hassan, hugovk'
__copyright__ = "Copyright (C) 2008-2010 Amr Hassan, 2013-2016 hugovk"
__license__ = "apache2"
__email__ = '[email protected]'
40
|
|
|
|
41
|
|
|
|
42
|
1 |
|
def _deprecation_warning(message): |
43
|
|
|
warnings.warn(message, DeprecationWarning) |
44
|
|
|
|
45
|
|
|
|
46
|
1 |
|
def _can_use_ssl_securely(): |
47
|
|
|
# Python 3.3 doesn't support create_default_context() but can be made to |
48
|
|
|
# work sanely. |
49
|
|
|
# <2.7.9 and <3.2 never did any SSL verification so don't do SSL there. |
50
|
|
|
# >3.4 and >2.7.9 has sane defaults so use SSL there. |
51
|
1 |
|
v = sys.version_info |
52
|
1 |
|
return v > (3, 3) or ((2, 7, 9) < v < (3, 0)) |
53
|
|
|
|
54
|
1 |
|
if _can_use_ssl_securely(): |
55
|
1 |
|
import ssl |
56
|
|
|
|
57
|
1 |
|
if sys.version_info[0] == 3: |
58
|
|
|
if _can_use_ssl_securely(): |
59
|
|
|
from http.client import HTTPSConnection |
60
|
|
|
else: |
61
|
|
|
from http.client import HTTPConnection |
62
|
|
|
import html.entities as htmlentitydefs |
63
|
|
|
from urllib.parse import splithost as url_split_host |
64
|
|
|
from urllib.parse import quote_plus as url_quote_plus |
65
|
|
|
|
66
|
|
|
unichr = chr |
67
|
|
|
|
68
|
1 |
|
elif sys.version_info[0] == 2: |
69
|
1 |
|
if _can_use_ssl_securely(): |
70
|
1 |
|
from httplib import HTTPSConnection |
71
|
|
|
else: |
72
|
|
|
from httplib import HTTPConnection |
73
|
1 |
|
import htmlentitydefs |
74
|
1 |
|
from urllib import splithost as url_split_host |
75
|
1 |
|
from urllib import quote_plus as url_quote_plus |
76
|
|
|
|
77
|
1 |
|
# Error codes returned in the <error code="..."> element of a failed
# web service response.
STATUS_INVALID_SERVICE = 2
STATUS_INVALID_METHOD = 3
STATUS_AUTH_FAILED = 4
STATUS_INVALID_FORMAT = 5
STATUS_INVALID_PARAMS = 6
STATUS_INVALID_RESOURCE = 7
STATUS_TOKEN_ERROR = 8
STATUS_INVALID_SK = 9
STATUS_INVALID_API_KEY = 10
STATUS_OFFLINE = 11
STATUS_SUBSCRIBERS_ONLY = 12
STATUS_INVALID_SIGNATURE = 13
STATUS_TOKEN_UNAUTHORIZED = 14
STATUS_TOKEN_EXPIRED = 15

# Event attendance statuses (sent as strings to the API).
EVENT_ATTENDING = '0'
EVENT_MAYBE_ATTENDING = '1'
EVENT_NOT_ATTENDING = '2'

# Time periods accepted by the chart/top-item services.
PERIOD_OVERALL = 'overall'
PERIOD_7DAYS = '7day'
PERIOD_1MONTH = '1month'
PERIOD_3MONTHS = '3month'
PERIOD_6MONTHS = '6month'
PERIOD_12MONTHS = '12month'

# Keys into a network's domain_names mapping (website language variants).
DOMAIN_ENGLISH = 0
DOMAIN_GERMAN = 1
DOMAIN_SPANISH = 2
DOMAIN_FRENCH = 3
DOMAIN_ITALIAN = 4
DOMAIN_POLISH = 5
DOMAIN_PORTUGUESE = 6
DOMAIN_SWEDISH = 7
DOMAIN_TURKISH = 8
DOMAIN_RUSSIAN = 9
DOMAIN_JAPANESE = 10
DOMAIN_CHINESE = 11

# Cover art size indices (smallest to largest).
COVER_SMALL = 0
COVER_MEDIUM = 1
COVER_LARGE = 2
COVER_EXTRA_LARGE = 3
COVER_MEGA = 4

# Sort orders for image lists.
IMAGES_ORDER_POPULARITY = "popularity"
IMAGES_ORDER_DATE = "dateadded"


# Gender values as reported by user profiles.
USER_MALE = 'Male'
USER_FEMALE = 'Female'

# Scrobble "source" codes (where the played track came from).
SCROBBLE_SOURCE_USER = "P"
SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST = "R"
SCROBBLE_SOURCE_PERSONALIZED_BROADCAST = "E"
SCROBBLE_SOURCE_LASTFM = "L"
SCROBBLE_SOURCE_UNKNOWN = "U"

# Scrobble "mode" codes (how the user rated/ended the play).
SCROBBLE_MODE_PLAYED = ""
SCROBBLE_MODE_LOVED = "L"
SCROBBLE_MODE_BANNED = "B"
SCROBBLE_MODE_SKIPPED = "S"
139
|
|
|
|
140
|
|
|
# Pattern matching characters that are illegal in XML 1.0: control
# characters, non-characters, and unpaired/misordered UTF-16 surrogates.
# From http://boodebr.org/main/python/all-about-python-and-unicode#UNI_XML
RE_XML_ILLEGAL = (u'([\u0000-\u0008\u000b-\u000c\u000e-\u001f\ufffe-\uffff])' +
                  u'|' +
                  u'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])'
                  %
                  (unichr(0xd800), unichr(0xdbff), unichr(0xdc00),
                   unichr(0xdfff), unichr(0xd800), unichr(0xdbff),
                   unichr(0xdc00), unichr(0xdfff), unichr(0xd800),
                   unichr(0xdbff), unichr(0xdc00), unichr(0xdfff)))

# Compiled once at import time; used to strip illegal characters from
# responses before XML parsing.
XML_ILLEGAL = re.compile(RE_XML_ILLEGAL)
151
|
|
|
|
152
|
|
|
# Python <=3.3 doesn't support create_default_context()
# <2.7.9 and <3.2 never did any SSL verification
# FIXME This can be removed after 2017-09 when 3.3 is no longer supported and
# pypy3 uses 3.4 or later, see
# https://en.wikipedia.org/wiki/CPython#Version_history
if sys.version_info[0] == 3 and sys.version_info[1] == 3:
    # Hand-build a verified TLS context for Python 3.3, using certifi's
    # CA bundle since 3.3 has no create_default_context().
    import certifi
    SSL_CONTEXT = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    SSL_CONTEXT.verify_mode = ssl.CERT_REQUIRED
    SSL_CONTEXT.options |= ssl.OP_NO_COMPRESSION
    # Intermediate from https://wiki.mozilla.org/Security/Server_Side_TLS
    # Create the cipher string
    cipher_string = """
    ECDHE-ECDSA-CHACHA20-POLY1305
    ECDHE-RSA-CHACHA20-POLY1305
    ECDHE-ECDSA-AES128-GCM-SHA256
    ECDHE-RSA-AES128-GCM-SHA256
    ECDHE-ECDSA-AES256-GCM-SHA384
    ECDHE-RSA-AES256-GCM-SHA384
    DHE-RSA-AES128-GCM-SHA256
    DHE-RSA-AES256-GCM-SHA384
    ECDHE-ECDSA-AES128-SHA256
    ECDHE-RSA-AES128-SHA256
    ECDHE-ECDSA-AES128-SHA
    ECDHE-RSA-AES256-SHA384
    ECDHE-RSA-AES128-SHA
    ECDHE-ECDSA-AES256-SHA384
    ECDHE-ECDSA-AES256-SHA
    ECDHE-RSA-AES256-SHA
    DHE-RSA-AES128-SHA256
    DHE-RSA-AES128-SHA
    DHE-RSA-AES256-SHA256
    DHE-RSA-AES256-SHA
    ECDHE-ECDSA-DES-CBC3-SHA
    ECDHE-RSA-DES-CBC3-SHA
    EDH-RSA-DES-CBC3-SHA
    AES128-GCM-SHA256
    AES256-GCM-SHA384
    AES128-SHA256
    AES256-SHA256
    AES128-SHA
    AES256-SHA
    DES-CBC3-SHA
    !DSS
    """
    # Collapse the multi-line literal into the colon-free space-separated
    # form set_ciphers() expects.
    cipher_string = ' '.join(cipher_string.split())
    SSL_CONTEXT.set_ciphers(cipher_string)
    SSL_CONTEXT.load_verify_locations(certifi.where())

# Python >3.4 and >2.7.9 has sane defaults
elif sys.version_info > (3, 4) or ((2, 7, 9) < sys.version_info < (3, 0)):
    SSL_CONTEXT = ssl.create_default_context()
204
|
|
|
|
205
|
|
|
|
206
|
1 |
|
class _Network(object):
    """
    A music social network website such as Last.fm or
    one with a Last.fm-compatible API.
    """

    def __init__(
            self, name, homepage, ws_server, api_key, api_secret, session_key,
            submission_server, username, password_hash, domain_names, urls):
        """
        name: the name of the network
        homepage: the homepage URL
        ws_server: the URL of the webservices server
        api_key: a provided API_KEY
        api_secret: a provided API_SECRET
        session_key: a generated session_key or None
        submission_server: the URL of the server to which tracks are
            submitted (scrobbled)
        username: a username of a valid user
        password_hash: the output of pylast.md5(password) where password is
            the user's password
        domain_names: a dict mapping each DOMAIN_* value to a string domain
            name
        urls: a dict mapping types to URLs

        if username and password_hash were provided and not session_key,
        session_key will be generated automatically when needed.

        Either a valid session_key or a combination of username and
        password_hash must be present for scrobbling.

        You should use a preconfigured network object through a
        get_*_network(...) method instead of creating an object
        of this class, unless you know what you're doing.
        """

        self.name = name
        self.homepage = homepage
        self.ws_server = ws_server
        self.api_key = api_key
        self.api_secret = api_secret
        self.session_key = session_key
        self.submission_server = submission_server
        self.username = username
        self.password_hash = password_hash
        self.domain_names = domain_names
        self.urls = urls

        # Per-network state for caching, proxying and rate limiting,
        # all off by default.
        self.cache_backend = None
        self.proxy_enabled = False
        self.proxy = None
        self.last_call_time = 0
        self.limit_rate = False

        # Generate a session_key if necessary
        # NOTE(review): this performs a network round-trip during
        # construction when credentials are supplied but no session_key is.
        if ((self.api_key and self.api_secret) and not self.session_key and
                (self.username and self.password_hash)):
            sk_gen = SessionKeyGenerator(self)
            self.session_key = sk_gen.get_session_key(
                self.username, self.password_hash)
266
|
|
|
|
267
|
1 |
|
def __str__(self): |
268
|
1 |
|
return "%s Network" % self.name |
269
|
|
|
|
270
|
1 |
|
    def get_artist(self, artist_name):
        """
        Return an Artist object for artist_name on this network
        """

        return Artist(artist_name, self)

    def get_track(self, artist, title):
        """
        Return a Track object for the given artist and title
        """

        return Track(artist, title, self)

    def get_album(self, artist, title):
        """
        Return an Album object for the given artist and title
        """

        return Album(artist, title, self)

    def get_authenticated_user(self):
        """
        Returns the authenticated user
        """

        return AuthenticatedUser(self)

    def get_country(self, country_name):
        """
        Returns a country object
        """

        return Country(country_name, self)

    def get_metro(self, metro_name, country_name):
        """
        Returns a metro object
        """

        return Metro(metro_name, country_name, self)

    def get_group(self, name):
        """
        Returns a Group object
        """

        return Group(name, self)

    def get_user(self, username):
        """
        Returns a user object
        """

        return User(username, self)

    def get_tag(self, name):
        """
        Returns a tag object
        """

        return Tag(name, self)
332
|
|
|
|
333
|
1 |
|
    def get_scrobbler(self, client_id, client_version):
        """
        Returns a Scrobbler object used for submitting tracks to the server

        Deprecated: emits a DeprecationWarning; use scrobble(),
        scrobble_many() and update_now_playing() instead.

        Quote from http://www.last.fm/api/submissions:
        ========
        Client identifiers are used to provide a centrally managed database
        of the client versions, allowing clients to be banned if they are
        found to be behaving undesirably. The client ID is associated with
        a version number on the server, however these are only incremented
        if a client is banned and do not have to reflect the version of the
        actual client application.

        During development, clients which have not been allocated an
        identifier should use the identifier tst, with a version number of
        1.0. Do not distribute code or client implementations which use
        this test identifier. Do not use the identifiers used by other
        clients.
        =========

        To obtain a new client identifier please contact:
            * Last.fm: [email protected]
            * # TODO: list others

        ...and provide us with the name of your client and its homepage
        address.
        """

        _deprecation_warning(
            "Use _Network.scrobble(...), _Network.scrobble_many(...),"
            " and Network.update_now_playing(...) instead")

        return Scrobbler(self, client_id, client_version)
366
|
|
|
|
367
|
1 |
|
def _get_language_domain(self, domain_language): |
368
|
|
|
""" |
369
|
|
|
Returns the mapped domain name of the network to a DOMAIN_* value |
370
|
|
|
""" |
371
|
|
|
|
372
|
1 |
|
if domain_language in self.domain_names: |
373
|
1 |
|
return self.domain_names[domain_language] |
374
|
|
|
|
375
|
1 |
|
def _get_url(self, domain, url_type): |
376
|
1 |
|
return "http://%s/%s" % ( |
377
|
|
|
self._get_language_domain(domain), self.urls[url_type]) |
378
|
|
|
|
379
|
1 |
|
def _get_ws_auth(self): |
380
|
|
|
""" |
381
|
|
|
Returns an (API_KEY, API_SECRET, SESSION_KEY) tuple. |
382
|
|
|
""" |
383
|
1 |
|
return (self.api_key, self.api_secret, self.session_key) |
384
|
|
|
|
385
|
1 |
|
    def _delay_call(self):
        """
        Makes sure that web service calls are at least 0.2 seconds apart.
        """

        # Delay time in seconds from section 4.4 of http://www.last.fm/api/tos
        DELAY_TIME = 0.2
        now = time.time()

        # Seconds elapsed since the previous call went out.
        time_since_last = now - self.last_call_time

        if time_since_last < DELAY_TIME:
            time.sleep(DELAY_TIME - time_since_last)

        # NOTE(review): records the pre-sleep timestamp, so consecutive
        # rate-limited calls measure from when this method was entered,
        # not from when the sleep finished — confirm this is intended.
        self.last_call_time = now
400
|
|
|
|
401
|
1 |
|
    def create_new_playlist(self, title, description):
        """
        Creates a playlist for the authenticated user and returns it
        title: The title of the new playlist.
        description: The description of the new playlist.
        """

        params = {}
        params['title'] = title
        params['description'] = description

        # execute(False): playlist creation must never be served from cache.
        doc = _Request(self, 'playlist.create', params).execute(False)

        # The response carries the new playlist's id and the owning user.
        e_id = doc.getElementsByTagName("id")[0].firstChild.data
        user = doc.getElementsByTagName('playlists')[0].getAttribute('user')

        return Playlist(user, e_id, self)
418
|
|
|
|
419
|
1 |
|
def get_top_artists(self, limit=None, cacheable=True): |
420
|
|
|
"""Returns the most played artists as a sequence of TopItem objects.""" |
421
|
|
|
|
422
|
1 |
|
params = {} |
423
|
1 |
|
if limit: |
424
|
1 |
|
params["limit"] = limit |
425
|
|
|
|
426
|
1 |
|
doc = _Request(self, "chart.getTopArtists", params).execute(cacheable) |
427
|
|
|
|
428
|
1 |
|
return _extract_top_artists(doc, self) |
429
|
|
|
|
430
|
1 |
|
def get_top_tracks(self, limit=None, cacheable=True): |
431
|
|
|
"""Returns the most played tracks as a sequence of TopItem objects.""" |
432
|
|
|
|
433
|
1 |
|
params = {} |
434
|
1 |
|
if limit: |
435
|
1 |
|
params["limit"] = limit |
436
|
|
|
|
437
|
1 |
|
doc = _Request(self, "chart.getTopTracks", params).execute(cacheable) |
438
|
|
|
|
439
|
1 |
|
seq = [] |
440
|
1 |
|
for node in doc.getElementsByTagName("track"): |
441
|
1 |
|
title = _extract(node, "name") |
442
|
1 |
|
artist = _extract(node, "name", 1) |
443
|
1 |
|
track = Track(artist, title, self) |
444
|
1 |
|
weight = _number(_extract(node, "playcount")) |
445
|
1 |
|
seq.append(TopItem(track, weight)) |
446
|
|
|
|
447
|
1 |
|
return seq |
448
|
|
|
|
449
|
1 |
|
def get_top_tags(self, limit=None, cacheable=True): |
450
|
|
|
"""Returns the most used tags as a sequence of TopItem objects.""" |
451
|
|
|
|
452
|
|
|
# Last.fm has no "limit" parameter for tag.getTopTags |
453
|
|
|
# so we need to get all (250) and then limit locally |
454
|
1 |
|
doc = _Request(self, "tag.getTopTags").execute(cacheable) |
455
|
|
|
|
456
|
1 |
|
seq = [] |
457
|
1 |
|
for node in doc.getElementsByTagName("tag"): |
458
|
1 |
|
if limit and len(seq) >= limit: |
459
|
1 |
|
break |
460
|
1 |
|
tag = Tag(_extract(node, "name"), self) |
461
|
1 |
|
weight = _number(_extract(node, "count")) |
462
|
1 |
|
seq.append(TopItem(tag, weight)) |
463
|
|
|
|
464
|
1 |
|
return seq |
465
|
|
|
|
466
|
1 |
|
def get_geo_events( |
467
|
|
|
self, longitude=None, latitude=None, location=None, distance=None, |
468
|
|
|
tag=None, festivalsonly=None, limit=None, cacheable=True): |
469
|
|
|
""" |
470
|
|
|
Returns all events in a specific location by country or city name. |
471
|
|
|
Parameters: |
472
|
|
|
longitude (Optional) : Specifies a longitude value to retrieve events |
473
|
|
|
for (service returns nearby events by default) |
474
|
|
|
latitude (Optional) : Specifies a latitude value to retrieve events for |
475
|
|
|
(service returns nearby events by default) |
476
|
|
|
location (Optional) : Specifies a location to retrieve events for |
477
|
|
|
(service returns nearby events by default) |
478
|
|
|
distance (Optional) : Find events within a specified radius |
479
|
|
|
(in kilometres) |
480
|
|
|
tag (Optional) : Specifies a tag to filter by. |
481
|
|
|
festivalsonly[0|1] (Optional) : Whether only festivals should be |
482
|
|
|
returned, or all events. |
483
|
|
|
limit (Optional) : The number of results to fetch per page. |
484
|
|
|
Defaults to 10. |
485
|
|
|
""" |
486
|
|
|
|
487
|
1 |
|
params = {} |
488
|
|
|
|
489
|
1 |
|
if longitude: |
490
|
1 |
|
params["long"] = longitude |
491
|
1 |
|
if latitude: |
492
|
1 |
|
params["lat"] = latitude |
493
|
1 |
|
if location: |
494
|
1 |
|
params["location"] = location |
495
|
1 |
|
if limit: |
496
|
1 |
View Code Duplication |
params["limit"] = limit |
|
|
|
|
497
|
1 |
|
if distance: |
498
|
1 |
|
params["distance"] = distance |
499
|
1 |
|
if tag: |
500
|
1 |
|
params["tag"] = tag |
501
|
1 |
|
if festivalsonly: |
502
|
1 |
|
params["festivalsonly"] = 1 |
503
|
1 |
|
elif not festivalsonly: |
504
|
1 |
|
params["festivalsonly"] = 0 |
505
|
|
|
|
506
|
1 |
|
doc = _Request(self, "geo.getEvents", params).execute(cacheable) |
507
|
|
|
|
508
|
|
|
return _extract_events_from_doc(doc, self) |
509
|
|
|
|
510
|
1 |
|
def get_metro_weekly_chart_dates(self, cacheable=True): |
511
|
|
|
""" |
512
|
|
|
Returns a list of From and To tuples for the available metro charts. |
513
|
|
|
""" |
514
|
|
|
|
515
|
1 |
|
doc = _Request(self, "geo.getMetroWeeklyChartlist").execute(cacheable) |
516
|
|
|
|
517
|
|
|
seq = [] |
518
|
|
|
for node in doc.getElementsByTagName("chart"): |
519
|
|
|
seq.append((node.getAttribute("from"), node.getAttribute("to"))) |
520
|
|
|
|
521
|
|
|
return seq |
522
|
|
|
|
523
|
1 |
|
def get_metros(self, country=None, cacheable=True): |
524
|
|
|
""" |
525
|
|
|
Get a list of valid countries and metros for use in the other |
526
|
|
|
webservices. |
527
|
|
|
Parameters: |
528
|
|
|
country (Optional) : Optionally restrict the results to those Metros |
529
|
|
|
from a particular country, as defined by the ISO 3166-1 country |
530
|
|
|
names standard. |
531
|
|
|
""" |
532
|
1 |
|
params = {} |
533
|
|
|
|
534
|
1 |
|
if country: |
535
|
1 |
|
params["country"] = country |
536
|
|
|
|
537
|
1 |
|
doc = _Request(self, "geo.getMetros", params).execute(cacheable) |
538
|
|
|
|
539
|
|
|
metros = doc.getElementsByTagName("metro") |
540
|
|
|
seq = [] |
541
|
|
|
|
542
|
|
|
for metro in metros: |
543
|
|
|
name = _extract(metro, "name") |
544
|
|
|
country = _extract(metro, "country") |
545
|
|
|
|
546
|
|
|
seq.append(Metro(name, country, self)) |
547
|
|
|
|
548
|
|
|
return seq |
549
|
|
|
|
550
|
1 |
|
def get_geo_top_artists(self, country, limit=None, cacheable=True): |
551
|
|
|
"""Get the most popular artists on Last.fm by country. |
552
|
|
|
Parameters: |
553
|
|
|
country (Required) : A country name, as defined by the ISO 3166-1 |
554
|
|
|
country names standard. |
555
|
|
|
limit (Optional) : The number of results to fetch per page. |
556
|
|
|
Defaults to 50. |
557
|
|
|
""" |
558
|
1 |
|
params = {"country": country} |
559
|
|
|
|
560
|
1 |
|
if limit: |
561
|
1 |
|
params["limit"] = limit |
562
|
|
|
|
563
|
1 |
|
doc = _Request(self, "geo.getTopArtists", params).execute(cacheable) |
564
|
|
|
|
565
|
1 |
|
return _extract_top_artists(doc, self) |
566
|
|
|
|
567
|
1 |
|
def get_geo_top_tracks( |
568
|
|
|
self, country, location=None, limit=None, cacheable=True): |
569
|
|
|
"""Get the most popular tracks on Last.fm last week by country. |
570
|
|
|
Parameters: |
571
|
|
|
country (Required) : A country name, as defined by the ISO 3166-1 |
572
|
|
|
country names standard |
573
|
|
|
location (Optional) : A metro name, to fetch the charts for |
574
|
|
|
(must be within the country specified) |
575
|
|
|
limit (Optional) : The number of results to fetch per page. |
576
|
|
|
Defaults to 50. |
577
|
|
|
""" |
578
|
1 |
|
params = {"country": country} |
579
|
|
|
|
580
|
1 |
|
if location: |
581
|
1 |
|
params["location"] = location |
582
|
1 |
|
if limit: |
583
|
1 |
|
params["limit"] = limit |
584
|
|
|
|
585
|
1 |
|
doc = _Request(self, "geo.getTopTracks", params).execute(cacheable) |
586
|
|
|
|
587
|
1 |
|
tracks = doc.getElementsByTagName("track") |
588
|
1 |
|
seq = [] |
589
|
|
|
|
590
|
1 |
|
for track in tracks: |
591
|
1 |
|
title = _extract(track, "name") |
592
|
1 |
|
artist = _extract(track, "name", 1) |
593
|
1 |
|
listeners = _extract(track, "listeners") |
594
|
|
|
|
595
|
1 |
|
seq.append(TopItem(Track(artist, title, self), listeners)) |
596
|
|
|
|
597
|
1 |
|
return seq |
598
|
|
|
|
599
|
1 |
|
    def enable_proxy(self, host, port):
        """Enable a default web proxy"""

        # The port may arrive as a string; _number() normalises it.
        self.proxy = [host, _number(port)]
        self.proxy_enabled = True

    def disable_proxy(self):
        """Disable using the web proxy"""

        self.proxy_enabled = False

    def is_proxy_enabled(self):
        """Returns True if a web proxy is enabled."""

        return self.proxy_enabled

    def _get_proxy(self):
        """Returns proxy details as a [host, port] list, or None."""

        return self.proxy
619
|
|
|
|
620
|
1 |
|
    def enable_rate_limit(self):
        """Enables rate limiting for this network"""
        self.limit_rate = True

    def disable_rate_limit(self):
        """Disables rate limiting for this network"""
        self.limit_rate = False

    def is_rate_limited(self):
        """Return True if web service calls are rate limited"""
        return self.limit_rate
631
|
|
|
|
632
|
1 |
|
    def enable_caching(self, file_path=None):
        """Enables caching request-wide for all cacheable calls.

        * file_path: A file path for the backend storage file. If
        None set, a temp file would probably be created, according the backend.
        """

        if not file_path:
            # NOTE(review): tempfile.mktemp is deprecated and race-prone;
            # it is used here because the shelve backend must create the
            # file itself — consider a safer scheme when revisiting.
            file_path = tempfile.mktemp(prefix="pylast_tmp_")

        self.cache_backend = _ShelfCacheBackend(file_path)
643
|
|
|
|
644
|
1 |
|
    def disable_caching(self):
        """Disables all caching features."""

        # Dropping the backend reference is enough; requests check for None.
        self.cache_backend = None
648
|
|
|
|
649
|
1 |
|
def is_caching_enabled(self): |
650
|
|
|
"""Returns True if caching is enabled.""" |
651
|
|
|
|
652
|
1 |
|
return not (self.cache_backend is None) |
653
|
|
|
|
654
|
1 |
|
    def _get_cache_backend(self):
        """Returns the active cache backend, or None when caching is off."""

        return self.cache_backend
657
|
|
|
|
658
|
1 |
|
    def search_for_album(self, album_name):
        """Searches for an album by its name. Returns an AlbumSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return AlbumSearch(album_name, self)

    def search_for_artist(self, artist_name):
        """Searches for an artist by its name. Returns an ArtistSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return ArtistSearch(artist_name, self)

    def search_for_tag(self, tag_name):
        """Searches for a tag by its name. Returns a TagSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return TagSearch(tag_name, self)

    def search_for_track(self, artist_name, track_name):
        """Searches for a track by its name and its artist. Set artist to an
        empty string if not available.
        Returns a TrackSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return TrackSearch(artist_name, track_name, self)

    def search_for_venue(self, venue_name, country_name):
        """Searches for a venue by its name and its country. Set country_name
        to an empty string if not available.
        Returns a VenueSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return VenueSearch(venue_name, country_name, self)
691
|
|
|
|
692
|
1 |
|
    def get_track_by_mbid(self, mbid):
        """Looks up a track by its MusicBrainz ID"""

        params = {"mbid": mbid}

        doc = _Request(self, "track.getInfo", params).execute(True)

        # The response lists the artist name second, the track name first.
        return Track(_extract(doc, "name", 1), _extract(doc, "name"), self)

    def get_artist_by_mbid(self, mbid):
        """Looks up an artist by its MusicBrainz ID"""

        params = {"mbid": mbid}

        doc = _Request(self, "artist.getInfo", params).execute(True)

        return Artist(_extract(doc, "name"), self)

    def get_album_by_mbid(self, mbid):
        """Looks up an album by its MusicBrainz ID"""

        params = {"mbid": mbid}

        doc = _Request(self, "album.getInfo", params).execute(True)

        return Album(_extract(doc, "artist"), _extract(doc, "name"), self)
718
|
|
|
|
719
|
1 |
|
def update_now_playing( |
720
|
|
|
self, artist, title, album=None, album_artist=None, |
721
|
|
|
duration=None, track_number=None, mbid=None, context=None): |
722
|
|
|
""" |
723
|
|
|
Used to notify Last.fm that a user has started listening to a track. |
724
|
|
|
|
725
|
|
|
Parameters: |
726
|
|
|
artist (Required) : The artist name |
727
|
|
|
title (Required) : The track title |
728
|
|
|
album (Optional) : The album name. |
729
|
|
|
album_artist (Optional) : The album artist - if this differs |
730
|
|
|
from the track artist. |
731
|
|
|
duration (Optional) : The length of the track in seconds. |
732
|
|
|
track_number (Optional) : The track number of the track on the |
733
|
|
|
album. |
734
|
|
|
mbid (Optional) : The MusicBrainz Track ID. |
735
|
|
|
context (Optional) : Sub-client version |
736
|
|
|
(not public, only enabled for certain API keys) |
737
|
|
|
""" |
738
|
|
|
|
739
|
1 |
|
params = {"track": title, "artist": artist} |
740
|
|
|
|
741
|
1 |
|
if album: |
742
|
1 |
|
params["album"] = album |
743
|
1 |
|
if album_artist: |
744
|
|
|
params["albumArtist"] = album_artist |
745
|
1 |
|
if context: |
746
|
|
|
params["context"] = context |
747
|
1 |
|
if track_number: |
748
|
1 |
|
params["trackNumber"] = track_number |
749
|
1 |
|
if mbid: |
750
|
|
|
params["mbid"] = mbid |
751
|
1 |
|
if duration: |
752
|
|
|
params["duration"] = duration |
753
|
|
|
|
754
|
1 |
|
_Request(self, "track.updateNowPlaying", params).execute() |
755
|
|
|
|
756
|
1 |
|
def scrobble( |
757
|
|
|
self, artist, title, timestamp, album=None, album_artist=None, |
758
|
|
|
track_number=None, duration=None, stream_id=None, context=None, |
759
|
|
|
mbid=None): |
760
|
|
|
|
761
|
|
|
"""Used to add a track-play to a user's profile. |
762
|
|
|
|
763
|
|
|
Parameters: |
764
|
|
|
artist (Required) : The artist name. |
765
|
|
|
title (Required) : The track name. |
766
|
|
|
timestamp (Required) : The time the track started playing, in UNIX |
767
|
|
|
timestamp format (integer number of seconds since 00:00:00, |
768
|
|
|
January 1st 1970 UTC). This must be in the UTC time zone. |
769
|
|
|
album (Optional) : The album name. |
770
|
|
|
album_artist (Optional) : The album artist - if this differs from |
771
|
|
|
the track artist. |
772
|
|
|
context (Optional) : Sub-client version (not public, only enabled |
773
|
|
|
for certain API keys) |
774
|
|
|
stream_id (Optional) : The stream id for this track received from |
775
|
|
|
the radio.getPlaylist service. |
776
|
|
|
track_number (Optional) : The track number of the track on the |
777
|
|
|
album. |
778
|
|
|
mbid (Optional) : The MusicBrainz Track ID. |
779
|
|
|
duration (Optional) : The length of the track in seconds. |
780
|
|
|
""" |
781
|
|
|
|
782
|
1 |
|
return self.scrobble_many(({ |
783
|
|
|
"artist": artist, "title": title, "timestamp": timestamp, |
784
|
|
|
"album": album, "album_artist": album_artist, |
785
|
|
|
"track_number": track_number, "duration": duration, |
786
|
|
|
"stream_id": stream_id, "context": context, "mbid": mbid},)) |
787
|
|
|
|
788
|
1 |
|
def scrobble_many(self, tracks): |
789
|
|
|
""" |
790
|
|
|
Used to scrobble a batch of tracks at once. The parameter tracks is a |
791
|
|
|
sequence of dicts per track containing the keyword arguments as if |
792
|
|
|
passed to the scrobble() method. |
793
|
|
|
""" |
794
|
|
|
|
795
|
1 |
|
tracks_to_scrobble = tracks[:50] |
796
|
1 |
|
if len(tracks) > 50: |
797
|
|
|
remaining_tracks = tracks[50:] |
798
|
|
|
else: |
799
|
1 |
|
remaining_tracks = None |
800
|
|
|
|
801
|
1 |
|
params = {} |
802
|
1 |
|
for i in range(len(tracks_to_scrobble)): |
803
|
|
|
|
804
|
1 |
|
params["artist[%d]" % i] = tracks_to_scrobble[i]["artist"] |
805
|
1 |
|
params["track[%d]" % i] = tracks_to_scrobble[i]["title"] |
806
|
|
|
|
807
|
1 |
|
additional_args = ( |
808
|
|
|
"timestamp", "album", "album_artist", "context", |
809
|
|
|
"stream_id", "track_number", "mbid", "duration") |
810
|
1 |
|
args_map_to = { # so friggin lazy |
811
|
|
|
"album_artist": "albumArtist", |
812
|
|
|
"track_number": "trackNumber", |
813
|
|
|
"stream_id": "streamID"} |
814
|
|
|
|
815
|
1 |
|
for arg in additional_args: |
816
|
|
|
|
817
|
1 |
|
if arg in tracks_to_scrobble[i] and tracks_to_scrobble[i][arg]: |
818
|
1 |
|
if arg in args_map_to: |
819
|
|
|
maps_to = args_map_to[arg] |
820
|
|
|
else: |
821
|
1 |
|
maps_to = arg |
822
|
|
|
|
823
|
1 |
|
params[ |
824
|
|
|
"%s[%d]" % (maps_to, i)] = tracks_to_scrobble[i][arg] |
825
|
|
|
|
826
|
1 |
|
_Request(self, "track.scrobble", params).execute() |
827
|
|
|
|
828
|
1 |
|
if remaining_tracks: |
829
|
|
|
self.scrobble_many(remaining_tracks) |
830
|
|
|
|
831
|
1 |
|
def get_play_links(self, link_type, things, cacheable=True): |
832
|
1 |
|
method = link_type + ".getPlaylinks" |
833
|
1 |
|
params = {} |
834
|
|
|
|
835
|
1 |
|
for i, thing in enumerate(things): |
836
|
1 |
|
if link_type == "artist": |
837
|
1 |
|
params['artist[' + str(i) + ']'] = thing |
838
|
1 |
|
elif link_type == "album": |
839
|
1 |
|
params['artist[' + str(i) + ']'] = thing.artist |
840
|
1 |
|
params['album[' + str(i) + ']'] = thing.title |
841
|
1 |
|
elif link_type == "track": |
842
|
1 |
|
params['artist[' + str(i) + ']'] = thing.artist |
843
|
1 |
|
params['track[' + str(i) + ']'] = thing.title |
844
|
|
|
|
845
|
1 |
|
doc = _Request(self, method, params).execute(cacheable) |
846
|
|
|
|
847
|
|
|
seq = [] |
848
|
|
|
|
849
|
|
|
for node in doc.getElementsByTagName("externalids"): |
850
|
|
|
spotify = _extract(node, "spotify") |
851
|
|
|
seq.append(spotify) |
852
|
|
|
|
853
|
|
|
return seq |
854
|
|
|
|
855
|
1 |
|
def get_artist_play_links(self, artists, cacheable=True): |
856
|
1 |
|
return self.get_play_links("artist", artists, cacheable) |
857
|
|
|
|
858
|
1 |
|
def get_album_play_links(self, albums, cacheable=True): |
859
|
1 |
|
return self.get_play_links("album", albums, cacheable) |
860
|
|
|
|
861
|
1 |
|
def get_track_play_links(self, tracks, cacheable=True): |
862
|
1 |
|
return self.get_play_links("track", tracks, cacheable) |
863
|
|
|
|
864
|
|
|
|
865
|
1 |
|
class LastFMNetwork(_Network):

    """A Last.fm network object

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key or a combination of username and password_hash
    must be present for scrobbling.

    Most read-only webservices only require an api_key and an api_secret, see
    about obtaining them from:
    http://www.last.fm/api/account
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash=""):
        # Country-specific Last.fm web site domains, keyed by the module's
        # DOMAIN_* constants.
        domain_names = {
            DOMAIN_ENGLISH: 'www.last.fm',
            DOMAIN_GERMAN: 'www.lastfm.de',
            DOMAIN_SPANISH: 'www.lastfm.es',
            DOMAIN_FRENCH: 'www.lastfm.fr',
            DOMAIN_ITALIAN: 'www.lastfm.it',
            DOMAIN_POLISH: 'www.lastfm.pl',
            DOMAIN_PORTUGUESE: 'www.lastfm.com.br',
            DOMAIN_SWEDISH: 'www.lastfm.se',
            DOMAIN_TURKISH: 'www.lastfm.com.tr',
            DOMAIN_RUSSIAN: 'www.lastfm.ru',
            DOMAIN_JAPANESE: 'www.lastfm.jp',
            DOMAIN_CHINESE: 'cn.last.fm',
        }

        # URL path templates for the different entity types.
        urls = {
            "album": "music/%(artist)s/%(album)s",
            "artist": "music/%(artist)s",
            "event": "event/%(id)s",
            "country": "place/%(country_name)s",
            "playlist": "user/%(user)s/library/playlists/%(appendix)s",
            "tag": "tag/%(name)s",
            "track": "music/%(artist)s/_/%(title)s",
            "group": "group/%(name)s",
            "user": "user/%(name)s",
        }

        _Network.__init__(
            self,
            name="Last.fm",
            homepage="http://last.fm",
            ws_server=("ws.audioscrobbler.com", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://post.audioscrobbler.com:80/",
            username=username,
            password_hash=password_hash,
            domain_names=domain_names,
            urls=urls,
        )

    def __repr__(self):
        # Render as a constructor-style expression with all credentials.
        credentials = (
            self.api_key, self.api_secret, self.session_key,
            self.username, self.password_hash)
        return "pylast.LastFMNetwork(%s)" % (
            ", ".join("'%s'" % value for value in credentials))
|
|
|
936
|
|
|
|
937
|
1 |
|
def get_lastfm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Deprecated: returns a preconfigured _Network object for Last.fm.

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key or a combination of username and password_hash
    must be present for scrobbling.

    Most read-only webservices only require an api_key and an api_secret, see
    about obtaining them from:
    http://www.last.fm/api/account
    """

    _deprecation_warning("Create a LastFMNetwork object instead")

    return LastFMNetwork(
        api_key=api_key,
        api_secret=api_secret,
        session_key=session_key,
        username=username,
        password_hash=password_hash)
|
|
|
966
|
|
|
|
967
|
1 |
|
class LibreFMNetwork(_Network):
    """
    A preconfigured _Network object for Libre.fm

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash=""):

        # Libre.fm serves every language from the same domain.
        domain_names = {
            domain: "libre.fm"
            for domain in (
                DOMAIN_ENGLISH,
                DOMAIN_GERMAN,
                DOMAIN_SPANISH,
                DOMAIN_FRENCH,
                DOMAIN_ITALIAN,
                DOMAIN_POLISH,
                DOMAIN_PORTUGUESE,
                DOMAIN_SWEDISH,
                DOMAIN_TURKISH,
                DOMAIN_RUSSIAN,
                DOMAIN_JAPANESE,
                DOMAIN_CHINESE,
            )}

        # URL path templates for the different entity types.
        urls = {
            "album": "artist/%(artist)s/album/%(album)s",
            "artist": "artist/%(artist)s",
            "event": "event/%(id)s",
            "country": "place/%(country_name)s",
            "playlist": "user/%(user)s/library/playlists/%(appendix)s",
            "tag": "tag/%(name)s",
            "track": "music/%(artist)s/_/%(title)s",
            "group": "group/%(name)s",
            "user": "user/%(name)s",
        }

        _Network.__init__(
            self,
            name="Libre.fm",
            homepage="http://libre.fm",
            ws_server=("libre.fm", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://turtle.libre.fm:80/",
            username=username,
            password_hash=password_hash,
            domain_names=domain_names,
            urls=urls,
        )

    def __repr__(self):
        # Render as a constructor-style expression with all credentials.
        credentials = (
            self.api_key, self.api_secret, self.session_key,
            self.username, self.password_hash)
        return "pylast.LibreFMNetwork(%s)" % (
            ", ".join("'%s'" % value for value in credentials))
|
|
|
1032
|
|
|
|
1033
|
1 |
|
def get_librefm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Deprecated: returns a preconfigured _Network object for Libre.fm.

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """

    # The warnings module already prefixes the category name, so the message
    # must not repeat "DeprecationWarning: " (it previously did, unlike
    # get_lastfm_network).
    _deprecation_warning("Create a LibreFMNetwork object instead")

    return LibreFMNetwork(
        api_key, api_secret, session_key, username, password_hash)
|
|
|
1056
|
|
|
|
1057
|
1 |
|
class _ShelfCacheBackend(object):
    """Used as a backend for caching cacheable requests."""
    def __init__(self, file_path=None):
        # NOTE(review): shelve.open requires a real filename; the default
        # None would fail here — presumably callers always supply file_path.
        # TODO confirm.
        self.shelf = shelve.open(file_path)

    def __iter__(self):
        # Iterating the backend yields the stored cache keys.
        return iter(self.shelf.keys())

    def get_xml(self, key):
        # Return the cached XML string for key; raises KeyError if absent.
        return self.shelf[key]

    def set_xml(self, key, xml_string):
        # Store (or overwrite) the XML string under key.
        self.shelf[key] = xml_string
|
|
|
1071
|
|
|
|
1072
|
1 |
|
class _Request(object):
    """Representing an abstract web service operation."""

    def __init__(self, network, method_name, params=None):
        """
        network: the _Network the call is made against.
        method_name: the webservice method, e.g. "track.scrobble".
        params: optional dict of request parameters (copied, not kept).
        """

        # Use None instead of a mutable {} default argument.
        if params is None:
            params = {}

        self.network = network
        self.params = {}

        for key in params:
            self.params[key] = _unicode(params[key])

        (self.api_key, self.api_secret, self.session_key) = \
            network._get_ws_auth()

        self.params["api_key"] = self.api_key
        self.params["method"] = method_name

        if network.is_caching_enabled():
            self.cache = network._get_cache_backend()

        # Authenticated requests carry the session key and must be signed.
        if self.session_key:
            self.params["sk"] = self.session_key
            self.sign_it()

    def sign_it(self):
        """Sign this request."""

        if "api_sig" not in self.params:
            self.params['api_sig'] = self._get_signature()

    def _get_signature(self):
        """
        Returns a 32-character hexadecimal md5 hash of the signature string.
        """

        # Concatenate sorted name/value pairs, append the shared secret,
        # then md5 the result (joining once instead of += in a loop).
        string = "".join(
            name + self.params[name] for name in sorted(self.params.keys()))

        return md5(string + self.api_secret)

    def _get_cache_key(self):
        """
        The cache key is a string of concatenated sorted names and values.
        """

        # Credentials are excluded so identical queries share a cache entry.
        cache_key = "".join(
            key + self.params[key]
            for key in sorted(self.params.keys())
            if key not in ("api_sig", "api_key", "sk"))

        return hashlib.sha1(cache_key.encode("utf-8")).hexdigest()

    def _get_cached_response(self):
        """Returns a file object of the cached response."""

        if not self._is_cached():
            response = self._download_response()
            self.cache.set_xml(self._get_cache_key(), response)

        return self.cache.get_xml(self._get_cache_key())

    def _is_cached(self):
        """Returns True if the request is already in cache."""

        return self._get_cache_key() in self.cache

    def _download_response(self):
        """Returns a response body string from the server."""

        if self.network.limit_rate:
            self.network._delay_call()

        # Build the application/x-www-form-urlencoded POST body.
        data = []
        for name in self.params.keys():
            data.append('='.join((
                name, url_quote_plus(_string(self.params[name])))))
        data = '&'.join(data)

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            'Accept-Charset': 'utf-8',
            'User-Agent': "pylast" + '/' + __version__
        }

        (HOST_NAME, HOST_SUBDIR) = self.network.ws_server

        if self.network.is_proxy_enabled():
            # Route through the configured proxy, over SSL when the Python
            # version supports doing so securely.
            if _can_use_ssl_securely():
                conn = HTTPSConnection(
                    context=SSL_CONTEXT,
                    host=self.network._get_proxy()[0],
                    port=self.network._get_proxy()[1])
            else:
                conn = HTTPConnection(
                    host=self.network._get_proxy()[0],
                    port=self.network._get_proxy()[1])

            try:
                conn.request(
                    method='POST', url="http://" + HOST_NAME + HOST_SUBDIR,
                    body=data, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        else:
            if _can_use_ssl_securely():
                conn = HTTPSConnection(
                    context=SSL_CONTEXT,
                    host=HOST_NAME
                )
            else:
                conn = HTTPConnection(
                    host=HOST_NAME
                )

            try:
                conn.request(
                    method='POST', url=HOST_SUBDIR, body=data, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        try:
            response_text = _unicode(conn.getresponse().read())
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        # Replace characters that are illegal in XML before parsing.
        response_text = XML_ILLEGAL.sub("?", response_text)

        self._check_response_for_errors(response_text)
        return response_text

    def execute(self, cacheable=False):
        """Returns the XML DOM response of the POST Request from the server"""

        if self.network.is_caching_enabled() and cacheable:
            response = self._get_cached_response()
        else:
            response = self._download_response()

        return minidom.parseString(_string(response).replace(
            "opensearch:", ""))

    def _check_response_for_errors(self, response):
        """Checks the response for errors and raises one if any exists."""

        try:
            doc = minidom.parseString(_string(response).replace(
                "opensearch:", ""))
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        e = doc.getElementsByTagName('lfm')[0]

        if e.getAttribute('status') != "ok":
            e = doc.getElementsByTagName('error')[0]
            status = e.getAttribute('code')
            details = e.firstChild.data.strip()
            raise WSError(self.network, status, details)
|
|
|
1244
|
|
|
|
1245
|
1 |
|
class SessionKeyGenerator(object):
    """Methods of generating a session key:
    1) Web Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. sg = SessionKeyGenerator(network)
        c. url = sg.get_web_auth_url()
        d. Ask the user to open the url and authorize you, and wait for it.
        e. session_key = sg.get_web_auth_session_key(url)
    2) Username and Password Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. username = raw_input("Please enter your username: ")
        c. password_hash = pylast.md5(raw_input("Please enter your password: ")
        d. session_key = SessionKeyGenerator(network).get_session_key(username,
            password_hash)

    A session key's lifetime is infinite, unless the user revokes the rights
    of the given API Key.

    If you create a Network object with just a API_KEY and API_SECRET and a
    username and a password_hash, a SESSION_KEY will be automatically generated
    for that network and stored in it so you don't have to do this manually,
    unless you want to.
    """

    def __init__(self, network):
        self.network = network
        # Maps auth urls handed to the user back to their tokens.
        self.web_auth_tokens = {}

    def _get_web_auth_token(self):
        """
        Retrieves a token from the network for web authentication.
        The token then has to be authorized from getAuthURL before creating
        session.
        """

        request = _Request(self.network, 'auth.getToken')

        # default action is that a request is signed only when
        # a session key is provided.
        request.sign_it()

        doc = request.execute()

        e = doc.getElementsByTagName('token')[0]
        return e.firstChild.data

    def get_web_auth_url(self):
        """
        The user must open this page and authorize you first, then
        call get_web_auth_session_key(url) after that.
        """

        token = self._get_web_auth_token()

        url = '%(homepage)s/api/auth/?api_key=%(api)s&token=%(token)s' % \
            {"homepage": self.network.homepage,
             "api": self.network.api_key, "token": token}

        # Remember the token so get_web_auth_session_key() can look it up.
        self.web_auth_tokens[url] = token

        return url

    def get_web_auth_session_key(self, url):
        """
        Retrieves the session key of a web authorization process by its url.
        """

        # An unknown url yields an empty token; that will raise a WSError of
        # an unauthorized token when the request is executed.
        token = self.web_auth_tokens.get(url, "")

        request = _Request(self.network, 'auth.getSession', {'token': token})

        # default action is that a request is signed only when
        # a session key is provided.
        request.sign_it()

        doc = request.execute()

        return doc.getElementsByTagName('key')[0].firstChild.data

    def get_session_key(self, username, password_hash):
        """
        Retrieve a session key with a username and a md5 hash of the user's
        password.
        """

        params = {
            "username": username, "authToken": md5(username + password_hash)}
        request = _Request(self.network, "auth.getMobileSession", params)

        # default action is that a request is signed only when
        # a session key is provided.
        request.sign_it()

        doc = request.execute()

        return _extract(doc, "key")
|
|
|
1347
|
1 |
|
# Lightweight, immutable result records returned by the webservice wrappers.
TopItem = collections.namedtuple("TopItem", ["item", "weight"])
SimilarItem = collections.namedtuple("SimilarItem", ["item", "match"])
LibraryItem = collections.namedtuple(
    "LibraryItem", ["item", "playcount", "tagcount"])
PlayedTrack = collections.namedtuple(
    "PlayedTrack", ["track", "album", "playback_date", "timestamp"])
LovedTrack = collections.namedtuple(
    "LovedTrack", ["track", "date", "timestamp"])
# Image metadata: URLs per size, plus uploader/voting details.
ImageSizes = collections.namedtuple(
    "ImageSizes", [
        "original", "large", "largesquare", "medium", "small", "extralarge"])
Image = collections.namedtuple(
    "Image", [
        "title", "url", "dateadded", "format", "owner", "sizes", "votes"])
Shout = collections.namedtuple(
    "Shout", ["body", "author", "date"])
|
|
|
1364
|
|
|
|
1365
|
1 |
|
def _string_output(funct):
    """Decorator: coerce the wrapped function's return value via _string()."""
    def wrapper(*args):
        return _string(funct(*args))

    return wrapper
|
|
|
1371
|
|
|
|
1372
|
1 |
|
def _pad_list(given_list, desired_length, padding=None):
    """
    Pads a list to be of the desired_length.

    Mutates given_list in place and also returns it; lists already at or
    beyond desired_length are returned unchanged.
    """

    given_list.extend([padding] * (desired_length - len(given_list)))
    return given_list
|
|
|
1382
|
|
|
|
1383
|
1 |
|
class _BaseObject(object):
    """An abstract webservices object."""

    network = None

    def __init__(self, network, ws_prefix):
        self.network = network
        # Webservice method prefix, e.g. "artist" or "track".
        self.ws_prefix = ws_prefix

    def _request(self, method_name, cacheable=False, params=None):
        """Execute a webservice request, defaulting to this object's params."""
        if not params:
            params = self._get_params()

        return _Request(self.network, method_name, params).execute(cacheable)

    def _get_params(self):
        """Returns the most common set of parameters between all objects."""

        return {}

    def __hash__(self):
        # Convert any ints (or whatever) into strings
        values = map(six.text_type, self._get_params().values())

        return hash(self.network) + hash(six.text_type(type(self)) + "".join(
            list(self._get_params().keys()) + list(values)
        ).lower())

    def _extract_cdata_from_request(self, method_name, tag_name, params):
        """Return the stripped CDATA text of the first tag_name element."""
        doc = self._request(method_name, True, params)

        return doc.getElementsByTagName(
            tag_name)[0].firstChild.wholeText.strip()

    def _get_things(
            self, method, thing, thing_type, params=None, cacheable=True):
        """Returns a list of the most played thing_types by this thing."""

        doc = self._request(
            self.ws_prefix + "." + method, cacheable, params)

        seq = []
        for node in doc.getElementsByTagName(thing):
            title = _extract(node, "name")
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))

            seq.append(TopItem(
                thing_type(artist, title, self.network), playcount))

        return seq

    def get_top_fans(self, limit=None, cacheable=True):
        """Returns a list of the Users who played this the most.
        # Parameters:
            * limit int: Max elements.
        # For Artist/Track
        """

        doc = self._request(self.ws_prefix + '.getTopFans', cacheable)

        seq = []

        elements = doc.getElementsByTagName('user')

        for element in elements:
            if limit and len(seq) >= limit:
                break

            name = _extract(element, 'name')
            weight = _number(_extract(element, 'weight'))

            seq.append(TopItem(User(name, self.network), weight))

        return seq

    def share(self, users, message=None):
        """
        Shares this (sends out recommendations).
        Parameters:
            * users [User|str,]: A list that can contain usernames, emails,
            User objects, or all of them.
            * message str: A message to include in the recommendation message.
        Only for Artist/Event/Track.
        """

        # Last.fm currently accepts a max of 10 recipients at a time, so
        # send full batches of 10 (the old slices [0:9]/[9:] only sent 9
        # per batch).
        while len(users) > 10:
            section = users[0:10]
            users = users[10:]
            self.share(section, message)

        nusers = []
        for user in users:
            if isinstance(user, User):
                nusers.append(user.get_name())
            else:
                nusers.append(user)

        params = self._get_params()
        recipients = ','.join(nusers)
        params['recipient'] = recipients
        if message:
            params['message'] = message

        self._request(self.ws_prefix + '.share', False, params)

    def get_wiki_published_date(self):
        """
        Returns the date the wiki was published.
        Only for Album/Track.
        """
        return self.get_wiki("published")

    def get_wiki_summary(self):
        """
        Returns the summary of the wiki.
        Only for Album/Track.
        """
        return self.get_wiki("summary")

    def get_wiki_content(self):
        """
        Returns the content of the wiki.
        Only for Album/Track.
        """
        return self.get_wiki("content")

    def get_wiki(self, section):
        """
        Returns a section of the wiki.
        Only for Album/Track.
        section can be "content", "summary" or
            "published" (for published date)
        """

        doc = self._request(self.ws_prefix + ".getInfo", True)

        # Some entities have no wiki at all; return None in that case.
        if len(doc.getElementsByTagName("wiki")) == 0:
            return

        node = doc.getElementsByTagName("wiki")[0]

        return _extract(node, section)

    def get_shouts(self, limit=50, cacheable=False):
        """
        Returns a sequence of Shout objects
        """

        shouts = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getShouts",
                cacheable):
            shouts.append(
                Shout(
                    _extract(node, "body"),
                    User(_extract(node, "author"), self.network),
                    _extract(node, "date")
                )
            )
        return shouts
|
|
|
1548
|
|
|
|
1549
|
1 |
|
class _Chartable(object):
    """Common functions for classes with charts."""

    def __init__(self, ws_prefix):
        self.ws_prefix = ws_prefix  # TODO move to _BaseObject?

    def get_weekly_chart_dates(self):
        """Returns a list of From and To tuples for the available charts."""

        doc = self._request(self.ws_prefix + ".getWeeklyChartList", True)

        seq = []
        for node in doc.getElementsByTagName("chart"):
            seq.append((node.getAttribute("from"), node.getAttribute("to")))

        return seq

    def get_weekly_album_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly album charts for the week starting from the
        from_date value to the to_date value.
        Only for Group or User.
        """
        return self.get_weekly_charts("album", from_date, to_date)

    def get_weekly_artist_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly artist charts for the week starting from the
        from_date value to the to_date value.
        Only for Group, Tag or User.
        """
        return self.get_weekly_charts("artist", from_date, to_date)

    def get_weekly_track_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly track charts for the week starting from the
        from_date value to the to_date value.
        Only for Group or User.
        """
        return self.get_weekly_charts("track", from_date, to_date)

    def get_weekly_charts(self, chart_kind, from_date=None, to_date=None):
        """
        Returns the weekly charts for the week starting from the
        from_date value to the to_date value.
        chart_kind should be one of "album", "artist" or "track"
        """
        method = ".getWeekly" + chart_kind.title() + "Chart"
        # Map the chart kind to its class explicitly instead of eval()ing
        # the capitalized name (eval on an argument is an injection hazard).
        chart_type = {
            "album": Album, "artist": Artist, "track": Track,
        }[chart_kind.lower()]

        params = self._get_params()
        if from_date and to_date:
            params["from"] = from_date
            params["to"] = to_date

        doc = self._request(
            self.ws_prefix + method, True, params)

        seq = []
        for node in doc.getElementsByTagName(chart_kind.lower()):
            item = chart_type(
                _extract(node, "artist"), _extract(node, "name"), self.network)
            weight = _number(_extract(node, "playcount"))
            seq.append(TopItem(item, weight))

        return seq
|
|
|
1616
|
|
|
|
1617
|
1 |
|
class _Taggable(object):
    """Common functions for classes with tags."""

    def __init__(self, ws_prefix):
        # Webservice method prefix, e.g. "artist" or "track".
        self.ws_prefix = ws_prefix  # TODO move to _BaseObject
1623
|
1 |
|
    def add_tags(self, tags):
        """Adds one or several tags.
        * tags: A sequence of tag names or Tag objects.
        """

        # One webservice call per tag; add_tag handles Tag-vs-str.
        for tag in tags:
            self.add_tag(tag)
|
|
|
1631
|
1 |
|
def add_tag(self, tag): |
1632
|
|
|
"""Adds one tag. |
1633
|
|
|
* tag: a tag name or a Tag object. |
1634
|
|
|
""" |
1635
|
|
|
|
1636
|
1 |
|
if isinstance(tag, Tag): |
1637
|
1 |
|
tag = tag.get_name() |
1638
|
|
|
|
1639
|
1 |
|
params = self._get_params() |
1640
|
1 |
|
params['tags'] = tag |
1641
|
|
|
|
1642
|
1 |
|
self._request(self.ws_prefix + '.addTags', False, params) |
1643
|
|
|
|
1644
|
1 |
|
def remove_tag(self, tag): |
1645
|
|
|
"""Remove a user's tag from this object.""" |
1646
|
|
|
|
1647
|
1 |
|
if isinstance(tag, Tag): |
1648
|
1 |
|
tag = tag.get_name() |
1649
|
|
|
|
1650
|
1 |
|
params = self._get_params() |
1651
|
1 |
|
params['tag'] = tag |
1652
|
|
|
|
1653
|
1 |
|
self._request(self.ws_prefix + '.removeTag', False, params) |
1654
|
|
|
|
1655
|
1 |
|
def get_tags(self): |
1656
|
|
|
"""Returns a list of the tags set by the user to this object.""" |
1657
|
|
|
|
1658
|
|
|
# Uncacheable because it can be dynamically changed by the user. |
1659
|
1 |
|
params = self._get_params() |
1660
|
|
|
|
1661
|
1 |
|
doc = self._request(self.ws_prefix + '.getTags', False, params) |
1662
|
1 |
|
tag_names = _extract_all(doc, 'name') |
1663
|
1 |
|
tags = [] |
1664
|
1 |
|
for tag in tag_names: |
1665
|
1 |
|
tags.append(Tag(tag, self.network)) |
1666
|
|
|
|
1667
|
1 |
|
return tags |
1668
|
|
|
|
1669
|
1 |
|
def remove_tags(self, tags): |
1670
|
|
|
"""Removes one or several tags from this object. |
1671
|
|
|
* tags: a sequence of tag names or Tag objects. |
1672
|
|
|
""" |
1673
|
|
|
|
1674
|
1 |
|
for tag in tags: |
1675
|
1 |
|
self.remove_tag(tag) |
1676
|
|
|
|
1677
|
1 |
|
def clear_tags(self): |
1678
|
|
|
"""Clears all the user-set tags. """ |
1679
|
|
|
|
1680
|
|
|
self.remove_tags(*(self.get_tags())) |
1681
|
|
|
|
1682
|
1 |
|
def set_tags(self, tags): |
1683
|
|
|
"""Sets this object's tags to only those tags. |
1684
|
|
|
* tags: a sequence of tag names or Tag objects. |
1685
|
|
|
""" |
1686
|
|
|
|
1687
|
1 |
|
c_old_tags = [] |
1688
|
1 |
|
old_tags = [] |
1689
|
1 |
|
c_new_tags = [] |
1690
|
1 |
|
new_tags = [] |
1691
|
|
|
|
1692
|
1 |
|
to_remove = [] |
1693
|
1 |
|
to_add = [] |
1694
|
|
|
|
1695
|
1 |
|
tags_on_server = self.get_tags() |
1696
|
|
|
|
1697
|
1 |
|
for tag in tags_on_server: |
1698
|
1 |
|
c_old_tags.append(tag.get_name().lower()) |
1699
|
1 |
|
old_tags.append(tag.get_name()) |
1700
|
|
|
|
1701
|
1 |
|
for tag in tags: |
1702
|
1 |
|
c_new_tags.append(tag.lower()) |
1703
|
1 |
|
new_tags.append(tag) |
1704
|
|
|
|
1705
|
1 |
|
for i in range(0, len(old_tags)): |
1706
|
1 |
|
if not c_old_tags[i] in c_new_tags: |
1707
|
1 |
|
to_remove.append(old_tags[i]) |
1708
|
|
|
|
1709
|
1 |
|
for i in range(0, len(new_tags)): |
1710
|
1 |
|
if not c_new_tags[i] in c_old_tags: |
1711
|
|
|
to_add.append(new_tags[i]) |
1712
|
|
|
|
1713
|
1 |
|
self.remove_tags(to_remove) |
1714
|
1 |
|
self.add_tags(to_add) |
1715
|
|
|
|
1716
|
1 |
|
def get_top_tags(self, limit=None): |
1717
|
|
|
"""Returns a list of the most frequently used Tags on this object.""" |
1718
|
|
|
|
1719
|
1 |
|
doc = self._request(self.ws_prefix + '.getTopTags', True) |
1720
|
|
|
|
1721
|
1 |
|
elements = doc.getElementsByTagName('tag') |
1722
|
1 |
|
seq = [] |
1723
|
|
|
|
1724
|
1 |
|
for element in elements: |
1725
|
1 |
|
tag_name = _extract(element, 'name') |
1726
|
1 |
|
tagcount = _extract(element, 'count') |
1727
|
|
|
|
1728
|
1 |
|
seq.append(TopItem(Tag(tag_name, self.network), tagcount)) |
1729
|
|
|
|
1730
|
1 |
|
if limit: |
1731
|
1 |
|
seq = seq[:limit] |
1732
|
|
|
|
1733
|
1 |
|
return seq |
1734
|
|
|
|
1735
|
|
|
|
1736
|
1 |
|
class WSError(Exception):

    """Exception related to the Network web service.

    Raised when the service answers with an application-level error
    status; carries the numeric status code and the error details text.
    """

    def __init__(self, network, status, details):
        # Numeric status code from the service (see get_id() for values).
        self.status = status
        # Human-readable error message from the service.
        self.details = details
        # The network object the failing request was issued through.
        self.network = network

    @_string_output
    def __str__(self):
        # The service-supplied message is the most useful representation.
        return self.details

    def get_id(self):
        """Returns the exception ID, from one of the following:
            STATUS_INVALID_SERVICE = 2
            STATUS_INVALID_METHOD = 3
            STATUS_AUTH_FAILED = 4
            STATUS_INVALID_FORMAT = 5
            STATUS_INVALID_PARAMS = 6
            STATUS_INVALID_RESOURCE = 7
            STATUS_TOKEN_ERROR = 8
            STATUS_INVALID_SK = 9
            STATUS_INVALID_API_KEY = 10
            STATUS_OFFLINE = 11
            STATUS_SUBSCRIBERS_ONLY = 12
            STATUS_TOKEN_UNAUTHORIZED = 14
            STATUS_TOKEN_EXPIRED = 15
        """

        return self.status
1766
|
|
|
|
1767
|
|
|
|
1768
|
1 |
|
class MalformedResponseError(Exception):

    """Exception conveying a malformed response from the music network.

    Wraps the lower-level parse error so callers can inspect both the
    network involved and the original exception.
    """

    def __init__(self, network, underlying_error):
        # Keep both the network and the root-cause exception available.
        self.underlying_error = underlying_error
        self.network = network

    def __str__(self):
        template = "Malformed response from {}. Underlying error: {}"
        return template.format(self.network.name, str(self.underlying_error))
1778
|
|
|
|
1779
|
|
|
|
1780
|
1 |
|
class NetworkError(Exception):

    """Exception conveying a problem in sending a request to Last.fm.

    Represents a transport-level failure; the original exception is
    preserved in ``underlying_error``.
    """

    def __init__(self, network, underlying_error):
        # Keep both the network and the root-cause exception available.
        self.underlying_error = underlying_error
        self.network = network

    def __str__(self):
        cause = str(self.underlying_error)
        return "NetworkError: %s" % cause
1789
|
|
|
|
1790
|
|
|
|
1791
|
1 |
|
class _Opus(_BaseObject, _Taggable):

    """An album or track (shared base for Album and Track)."""

    # Set in __init__; kept as class-level defaults for introspection.
    artist = None
    title = None
    username = None

    # Re-export the base hash explicitly: defining __eq__ below would
    # otherwise implicitly set __hash__ to None on Python 3.
    __hash__ = _BaseObject.__hash__

    def __init__(self, artist, title, network, ws_prefix, username=None):
        """
        Create an opus instance.
        # Parameters:
            * artist: An artist name or an Artist object.
            * title: The album or track title.
            * ws_prefix: 'album' or 'track'
            * username: optional user whose play counts
              get_userplaycount() reports.
        """

        _BaseObject.__init__(self, network, ws_prefix)
        _Taggable.__init__(self, ws_prefix)

        # Accept either a ready-made Artist or a plain artist name.
        if isinstance(artist, Artist):
            self.artist = artist
        else:
            self.artist = Artist(artist, self.network)

        self.title = title
        self.username = username

    def __repr__(self):
        # e.g. pylast.Album('Artist', 'Title', LastFMNetwork(...)).
        return "pylast.%s(%s, %s, %s)" % (
            self.ws_prefix.title(), repr(self.artist.name),
            repr(self.title), repr(self.network))

    @_string_output
    def __str__(self):
        return _unicode("%s - %s") % (
            self.get_artist().get_name(), self.get_title())

    def __eq__(self, other):
        # Exact type match: an Album never equals a Track even with the
        # same artist/title.  Title/artist compared case-insensitively.
        if type(self) != type(other):
            return False
        a = self.get_title().lower()
        b = other.get_title().lower()
        c = self.get_artist().get_name().lower()
        d = other.get_artist().get_name().lower()
        return (a == b) and (c == d)

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # Parameters identifying this opus for web-service calls, e.g.
        # {'artist': ..., 'album': ...} or {'artist': ..., 'track': ...}.
        return {
            'artist': self.get_artist().get_name(),
            self.ws_prefix: self.get_title()}

    def get_artist(self):
        """Returns the associated Artist object."""

        return self.artist

    def get_title(self, properly_capitalized=False):
        """Returns the album or track title.

        If properly_capitalized is True, fetches the network's canonical
        capitalization (one web-service call) and caches it on self.title.
        """
        if properly_capitalized:
            self.title = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.title

    def get_name(self, properly_capitalized=False):
        """Returns the album or track title (alias to get_title())."""

        return self.get_title(properly_capitalized)

    def get_id(self):
        """Returns the ID on the network."""

        return _extract(
            self._request(self.ws_prefix + ".getInfo", cacheable=True), "id")

    def get_playcount(self):
        """Returns the number of plays on the network."""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username.

        Returns None when no username was supplied at construction time.
        """

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_listener_count(self):
        """Returns the number of listeners on the network."""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "listeners"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of the album or track, or None when
        the response carries no <mbid> element."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)

        try:
            # Walk <lfm> -> <album>/<track> -> <mbid> via *direct* children
            # only, to avoid picking up the mbid of a nested artist element.
            lfm = doc.getElementsByTagName('lfm')[0]
            opus = next(self._get_children_by_tag_name(lfm, self.ws_prefix))
            mbid = next(self._get_children_by_tag_name(opus, "mbid"))
            return mbid.firstChild.nodeValue
        except StopIteration:
            # Some element along the path is missing.
            # NOTE(review): an IndexError from the <lfm> lookup is NOT
            # caught here — presumably every response has an <lfm> root;
            # confirm against the web-service responses.
            return None

    def _get_children_by_tag_name(self, node, tag_name):
        # Generator over direct element children whose tag matches
        # tag_name ('*' matches any element).
        for child in node.childNodes:
            if (child.nodeType == child.ELEMENT_NODE and
                    (tag_name == '*' or child.tagName == tag_name)):
                yield child
1915
|
|
|
|
1916
|
|
|
|
1917
|
1 |
|
class Album(_Opus):

    """An album."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        # Delegate to _Opus with the "album" web-service prefix.
        super(Album, self).__init__(artist, title, network, "album", username)

    def get_release_date(self):
        """Returns the release date of the album."""

        return _extract(self._request(
            self.ws_prefix + ".getInfo", cacheable=True), "releasedate")

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        # The response lists images smallest-first; size indexes into it.
        return _extract_all(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), 'image')[size]

    def get_tracks(self):
        """Returns the list of Tracks on this album."""

        return _extract_tracks(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "tracks")

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album page on the network.
        # Parameters:
        * domain_name str: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        # URL-escape both components before filling the URL template.
        artist = _url_safe(self.get_artist().get_name())
        title = _url_safe(self.get_title())

        return self.network._get_url(
            domain_name, self.ws_prefix) % {
                'artist': artist, 'album': title}
1976
|
|
|
|
1977
|
|
|
|
1978
|
1 |
|
class Artist(_BaseObject, _Taggable):

    """An artist."""

    # Set in __init__; kept as class-level defaults for introspection.
    name = None
    username = None

    # Re-export the base hash explicitly: defining __eq__ below would
    # otherwise implicitly set __hash__ to None on Python 3.
    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network, username=None):
        """Create an artist object.
        # Parameters:
            * name str: The artist's name.
            * username: optional user whose play counts
              get_userplaycount() reports.
        """

        _BaseObject.__init__(self, network, 'artist')
        _Taggable.__init__(self, 'artist')

        self.name = name
        self.username = username

    def __repr__(self):
        return "pylast.Artist(%s, %s)" % (
            repr(self.get_name()), repr(self.network))

    def __unicode__(self):
        return six.text_type(self.get_name())

    @_string_output
    def __str__(self):
        return self.__unicode__()

    def __eq__(self, other):
        # Exact type match; names compared case-insensitively.
        if type(self) is type(other):
            return self.get_name().lower() == other.get_name().lower()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # Parameters identifying this artist for web-service calls.
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the artist.
        If properly_capitalized was asserted then the name would be downloaded
        overwriting the given one."""

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_correction(self):
        """Returns the corrected artist name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        return _extract_all(
            self._request(self.ws_prefix + ".getInfo", True), "image")[size]

    def get_playcount(self):
        """Returns the number of plays on the network."""

        return _number(_extract(
            self._request(self.ws_prefix + ".getInfo", True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username.

        Returns None when no username was supplied at construction time.
        """

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of this artist."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _extract(doc, "mbid")

    def get_listener_count(self):
        """Returns the number of listeners on the network."""

        # Memoized on the instance: only the first call hits the network.
        if hasattr(self, "listener_count"):
            return self.listener_count
        else:
            self.listener_count = _number(_extract(
                self._request(self.ws_prefix + ".getInfo", True), "listeners"))
            return self.listener_count

    def is_streamable(self):
        """Returns True if the artist is streamable."""

        return bool(_number(_extract(
            self._request(self.ws_prefix + ".getInfo", True), "streamable")))

    def get_bio(self, section, language=None):
        """
        Returns a section of the bio.
        section can be "content", "summary" or
            "published" (for published date)
        """
        # Only send explicit params when a language override is requested;
        # params=None lets the request layer use the defaults.
        if language:
            params = self._get_params()
            params["lang"] = language
        else:
            params = None

        return self._extract_cdata_from_request(
            self.ws_prefix + ".getInfo", section, params)

    def get_bio_published_date(self):
        """Returns the date on which the artist's biography was published."""
        return self.get_bio("published")

    def get_bio_summary(self, language=None):
        """Returns the summary of the artist's biography."""
        return self.get_bio("summary", language)

    def get_bio_content(self, language=None):
        """Returns the content of the artist's biography."""
        return self.get_bio("content", language)

    def get_upcoming_events(self):
        """Returns a list of the upcoming Events for this artist."""

        doc = self._request(self.ws_prefix + '.getEvents', True)

        return _extract_events_from_doc(doc, self.network)

    def get_similar(self, limit=None):
        """Returns the similar artists on the network.

        * limit: optional maximum number of SimilarItems to request.
        """

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(self.ws_prefix + '.getSimilar', True, params)

        # names[i] and matches[i] belong to the same similar artist.
        names = _extract_all(doc, "name")
        matches = _extract_all(doc, "match")

        artists = []
        for i in range(0, len(names)):
            artists.append(SimilarItem(
                Artist(names[i], self.network), _number(matches[i])))

        return artists

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopAlbums", "album", Album, params, cacheable)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks by this artist."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the artist page on the network.
        # Parameters:
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        artist = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "artist") % {'artist': artist}

    def shout(self, message):
        """
        Post a shout
        """

        params = self._get_params()
        params["message"] = message

        # NOTE(review): the method string "artist.Shout" is capitalized
        # unlike every other call here ("artist.addTags", ...) — confirm
        # against the web-service method name before changing it.
        self._request("artist.Shout", False, params)

    def get_band_members(self):
        """Returns a list of band members or None if unknown."""

        names = None
        doc = self._request(self.ws_prefix + ".getInfo", True)

        # If a <bandmembers> element exists, collect the member names
        # (the last such element wins if there are several).
        for node in doc.getElementsByTagName("bandmembers"):
            names = _extract_all(node, "name")

        return names
2207
|
|
|
|
2208
|
|
|
|
2209
|
1 |
|
class Event(_BaseObject):

    """An event."""

    # Network-assigned event identifier; set in __init__.
    id = None

    # Re-export the base hash explicitly: defining __eq__ below would
    # otherwise implicitly set __hash__ to None on Python 3.
    __hash__ = _BaseObject.__hash__

    def __init__(self, event_id, network):
        """Create an event object from its network id."""
        _BaseObject.__init__(self, network, 'event')

        self.id = event_id

    def __repr__(self):
        return "pylast.Event(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Event #" + str(self.get_id())

    def __eq__(self, other):
        # Exact type match; events are equal iff their ids are equal.
        if type(self) is type(other):
            return self.get_id() == other.get_id()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # Parameters identifying this event for web-service calls.
        return {'event': self.get_id()}

    def attend(self, attending_status):
        """Sets the attending status.
        * attending_status: The attending status. Possible values:
          o EVENT_ATTENDING
          o EVENT_MAYBE_ATTENDING
          o EVENT_NOT_ATTENDING
        """

        params = self._get_params()
        params['status'] = attending_status

        # Write operation: never cacheable.
        self._request('event.attend', False, params)

    def get_attendees(self):
        """
        Get a list of attendees for an event
        """

        # Uncacheable: the attendee list can change at any time.
        doc = self._request("event.getAttendees", False)

        users = []
        for name in _extract_all(doc, "name"):
            users.append(User(name, self.network))

        return users

    def get_id(self):
        """Returns the id of the event on the network. """

        return self.id

    def get_title(self):
        """Returns the title of the event. """

        doc = self._request("event.getInfo", True)

        return _extract(doc, "title")

    def get_headliner(self):
        """Returns the headliner of the event. """

        doc = self._request("event.getInfo", True)

        return Artist(_extract(doc, "headliner"), self.network)

    def get_artists(self):
        """Returns a list of the participating Artists. """

        doc = self._request("event.getInfo", True)
        names = _extract_all(doc, "artist")

        artists = []
        for name in names:
            artists.append(Artist(name, self.network))

        return artists

    def get_venue(self):
        """Returns the venue where the event is held."""

        doc = self._request("event.getInfo", True)

        # Pass the already-fetched <venue> element through so the Venue
        # object can be populated without another request.
        v = doc.getElementsByTagName("venue")[0]
        venue_id = _number(_extract(v, "id"))

        return Venue(venue_id, self.network, venue_element=v)

    def get_start_date(self):
        """Returns the date when the event starts."""

        doc = self._request("event.getInfo", True)

        return _extract(doc, "startDate")

    def get_description(self):
        """Returns the description of the event. """

        doc = self._request("event.getInfo", True)

        return _extract(doc, "description")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        doc = self._request("event.getInfo", True)

        return _extract_all(doc, "image")[size]

    def get_attendance_count(self):
        """Returns the number of attending people. """

        doc = self._request("event.getInfo", True)

        return _number(_extract(doc, "attendance"))

    def get_review_count(self):
        """Returns the number of available reviews for this event. """

        doc = self._request("event.getInfo", True)

        return _number(_extract(doc, "reviews"))

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the event page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        return self.network._get_url(
            domain_name, "event") % {'id': self.get_id()}

    def shout(self, message):
        """
        Post a shout
        """

        params = self._get_params()
        params["message"] = message

        # NOTE(review): "event.Shout" is capitalized unlike the other
        # method strings here — confirm against the web-service method
        # name before changing it.
        self._request("event.Shout", False, params)
2379
|
|
|
|
2380
|
|
|
|
2381
|
1 |
|
class Country(_BaseObject):

    """A country at Last.fm."""

    # Country name; set in __init__.
    name = None

    # Re-export the base hash explicitly: defining __eq__ below would
    # otherwise implicitly set __hash__ to None on Python 3.
    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        """Create a country object.
        # Parameters:
            * name str: The country's name, used verbatim in geo.* calls.
        """
        _BaseObject.__init__(self, network, "geo")

        self.name = name

    def __repr__(self):
        return "pylast.Country(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Bug fix: guard the type, so comparing with a non-Country returns
        # False instead of raising AttributeError.  Matches the pattern
        # used by Artist.__eq__ and Event.__eq__.  Names are compared
        # case-insensitively.
        if type(self) is type(other):
            return self.get_name().lower() == other.get_name().lower()
        else:
            return False

    def __ne__(self, other):
        # Bug fix: defined as the exact negation of __eq__.  The previous
        # version compared case-sensitively, so two objects differing only
        # in case were simultaneously == and != each other.
        return not self.__eq__(other)

    def _get_params(self):  # TODO can move to _BaseObject
        # Parameters identifying this country for web-service calls.
        return {'country': self.get_name()}

    def _get_name_from_code(self, alpha2code):
        # TODO: Have this function lookup the alpha-2 code and return the
        # country name.

        return alpha2code

    def get_name(self):
        """Returns the country name. """

        return self.name

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request('geo.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a sequence of the most played tracks"""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the country page on the network.
        * domain_name: The network's language domain. Possible values:
          o DOMAIN_ENGLISH
          o DOMAIN_GERMAN
          o DOMAIN_SPANISH
          o DOMAIN_FRENCH
          o DOMAIN_ITALIAN
          o DOMAIN_POLISH
          o DOMAIN_PORTUGUESE
          o DOMAIN_SWEDISH
          o DOMAIN_TURKISH
          o DOMAIN_RUSSIAN
          o DOMAIN_JAPANESE
          o DOMAIN_CHINESE
        """

        country_name = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "country") % {'country_name': country_name}
2460
|
|
|
|
2461
|
|
|
|
2462
|
1 |
|
class Metro(_BaseObject): |
2463
|
|
|
"""A metro at Last.fm.""" |
2464
|
|
|
|
2465
|
1 |
|
name = None |
2466
|
1 |
|
country = None |
2467
|
|
|
|
2468
|
1 |
|
__hash__ = _BaseObject.__hash__ |
2469
|
|
|
|
2470
|
1 |
|
def __init__(self, name, country, network): |
2471
|
1 |
|
_BaseObject.__init__(self, network, None) |
2472
|
|
|
|
2473
|
1 |
|
self.name = name |
2474
|
1 |
|
self.country = country |
2475
|
|
|
|
2476
|
1 |
|
def __repr__(self): |
2477
|
|
|
return "pylast.Metro(%s, %s, %s)" % ( |
2478
|
|
|
repr(self.name), repr(self.country), repr(self.network)) |
2479
|
|
|
|
2480
|
1 |
|
@_string_output |
2481
|
|
|
def __str__(self): |
2482
|
1 |
|
return self.get_name() + ", " + self.get_country() |
2483
|
|
|
|
2484
|
1 |
|
def __eq__(self, other): |
2485
|
1 |
|
return (self.get_name().lower() == other.get_name().lower() and |
2486
|
|
|
self.get_country().lower() == other.get_country().lower()) |
2487
|
|
|
|
2488
|
1 |
|
def __ne__(self, other):
    # Bug fix: defined as the exact negation of __eq__.  The previous
    # version compared the metro name case-sensitively while __eq__
    # compared it case-insensitively, so two Metro objects differing
    # only in name case were simultaneously == and != each other.
    return not self.__eq__(other)
2491
|
|
|
|
2492
|
1 |
|
def _get_params(self): |
2493
|
1 |
|
return {'metro': self.get_name(), 'country': self.get_country()} |
2494
|
|
|
|
2495
|
1 |
|
def get_name(self): |
2496
|
|
|
"""Returns the metro name.""" |
2497
|
|
|
|
2498
|
1 |
|
return self.name |
2499
|
|
|
|
2500
|
1 |
|
def get_country(self): |
2501
|
|
|
"""Returns the metro country.""" |
2502
|
|
|
|
2503
|
1 |
|
return self.country |
2504
|
|
|
|
2505
|
1 |
|
def _get_chart( |
2506
|
|
|
self, method, tag="artist", limit=None, from_date=None, |
2507
|
|
|
to_date=None, cacheable=True): |
2508
|
|
|
"""Internal helper for getting geo charts.""" |
2509
|
|
|
params = self._get_params() |
2510
|
|
|
if limit: |
2511
|
|
|
params["limit"] = limit |
2512
|
|
|
if from_date and to_date: |
2513
|
|
|
params["from"] = from_date |
2514
|
|
|
params["to"] = to_date |
2515
|
|
|
|
2516
|
|
|
doc = self._request(method, cacheable, params) |
2517
|
|
|
|
2518
|
|
|
seq = [] |
2519
|
|
|
for node in doc.getElementsByTagName(tag): |
2520
|
|
|
if tag == "artist": |
2521
|
|
|
item = Artist(_extract(node, "name"), self.network) |
2522
|
|
|
elif tag == "track": |
2523
|
|
|
title = _extract(node, "name") |
2524
|
|
|
artist = _extract_element_tree(node).get('artist')['name'] |
2525
|
|
|
item = Track(artist, title, self.network) |
2526
|
|
|
else: |
2527
|
|
|
return None |
2528
|
|
|
weight = _number(_extract(node, "listeners")) |
2529
|
|
|
seq.append(TopItem(item, weight)) |
2530
|
|
|
|
2531
|
|
|
return seq |
2532
|
|
|
|
2533
|
1 |
|
def get_artist_chart( |
2534
|
|
|
self, tag="artist", limit=None, from_date=None, to_date=None, |
2535
|
|
|
cacheable=True): |
2536
|
|
|
"""Get a chart of artists for a metro. |
2537
|
|
|
Parameters: |
2538
|
|
|
from_date (Optional) : Beginning timestamp of the weekly range |
2539
|
|
|
requested |
2540
|
|
|
to_date (Optional) : Ending timestamp of the weekly range requested |
2541
|
|
|
limit (Optional) : The number of results to fetch per page. |
2542
|
|
|
Defaults to 50. |
2543
|
|
|
""" |
2544
|
|
|
return self._get_chart( |
2545
|
|
|
"geo.getMetroArtistChart", tag=tag, limit=limit, |
2546
|
|
|
from_date=from_date, to_date=to_date, cacheable=cacheable) |
2547
|
|
|
|
2548
|
1 |
|
def get_hype_artist_chart(
        self, tag="artist", limit=None, from_date=None, to_date=None,
        cacheable=True):
    """Get a chart of hyped (up and coming) artists for a metro.
    Parameters:
    from_date (Optional) : Beginning timestamp of the weekly range
        requested
    to_date (Optional) : Ending timestamp of the weekly range requested
    limit (Optional) : The number of results to fetch per page.
        Defaults to 50.
    """
    # Thin wrapper over _get_chart with the hype-artist method name.
    return self._get_chart(
        "geo.getMetroHypeArtistChart", tag=tag, limit=limit,
        from_date=from_date, to_date=to_date, cacheable=cacheable)
2562
|
|
|
|
2563
|
1 |
|
def get_unique_artist_chart(
        self, tag="artist", limit=None, from_date=None, to_date=None,
        cacheable=True):
    """Get a chart of the artists which make that metro unique.
    Parameters:
    from_date (Optional) : Beginning timestamp of the weekly range
        requested
    to_date (Optional) : Ending timestamp of the weekly range requested
    limit (Optional) : The number of results to fetch per page.
        Defaults to 50.
    """
    # Thin wrapper over _get_chart with the unique-artist method name.
    return self._get_chart(
        "geo.getMetroUniqueArtistChart", tag=tag, limit=limit,
        from_date=from_date, to_date=to_date, cacheable=cacheable)
2577
|
|
|
|
2578
|
1 |
|
def get_track_chart(
        self, tag="track", limit=None, from_date=None, to_date=None,
        cacheable=True):
    """Get a chart of tracks for a metro.
    Parameters:
    from_date (Optional) : Beginning timestamp of the weekly range
        requested
    to_date (Optional) : Ending timestamp of the weekly range requested
    limit (Optional) : The number of results to fetch per page.
        Defaults to 50.
    """
    # Thin wrapper over _get_chart with the track-chart method name.
    return self._get_chart(
        "geo.getMetroTrackChart", tag=tag, limit=limit,
        from_date=from_date, to_date=to_date, cacheable=cacheable)
2592
|
|
|
|
2593
|
1 |
|
def get_hype_track_chart(
        self, tag="track", limit=None, from_date=None, to_date=None,
        cacheable=True):
    """Get a chart of hyped (up and coming) tracks for a metro.

    (Docstring previously copy-pasted from get_track_chart; this calls
    geo.getMetroHypeTrackChart.)
    Parameters:
    from_date (Optional) : Beginning timestamp of the weekly range
        requested
    to_date (Optional) : Ending timestamp of the weekly range requested
    limit (Optional) : The number of results to fetch per page.
        Defaults to 50.
    """
    return self._get_chart(
        "geo.getMetroHypeTrackChart", tag=tag,
        limit=limit, from_date=from_date, to_date=to_date,
        cacheable=cacheable)
2608
|
|
|
|
2609
|
1 |
|
def get_unique_track_chart(
        self, tag="track", limit=None, from_date=None, to_date=None,
        cacheable=True):
    """Get a chart of the tracks which make that metro unique.

    (Docstring previously copy-pasted from get_track_chart; this calls
    geo.getMetroUniqueTrackChart.)
    Parameters:
    from_date (Optional) : Beginning timestamp of the weekly range
        requested
    to_date (Optional) : Ending timestamp of the weekly range requested
    limit (Optional) : The number of results to fetch per page.
        Defaults to 50.
    """
    return self._get_chart(
        "geo.getMetroUniqueTrackChart", tag=tag, limit=limit,
        from_date=from_date, to_date=to_date, cacheable=cacheable)
2623
|
|
|
|
2624
|
|
|
|
2625
|
1 |
|
class Library(_BaseObject):
    """A user's Last.fm library (their collection of albums, artists and
    tracks)."""

    user = None  # owning User object

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, network):
        _BaseObject.__init__(self, network, 'library')

        # Accept either a User object or a plain username string.
        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        # Pagination cursors used when iterating library contents.
        self._albums_index = 0
        self._artists_index = 0
        self._tracks_index = 0

    def __repr__(self):
        return "pylast.Library(%s, %s)" % (repr(self.user), repr(self.network))

    @_string_output
    def __str__(self):
        return repr(self.get_user()) + "'s Library"

    def _get_params(self):
        # Every library call is scoped to the owning user.
        return {'user': self.user.get_name()}

    def get_user(self):
        """Returns the user who owns this library."""

        return self.user

    def add_album(self, album):
        """Add an album to this library."""

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        # Fix: use ws_prefix like every other method in this class instead
        # of the hard-coded "library.addAlbum" string.
        self._request(self.ws_prefix + ".addAlbum", False, params)

    def remove_album(self, album):
        """Remove an album from this library."""

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        self._request(self.ws_prefix + ".removeAlbum", False, params)

    def add_artist(self, artist):
        """Add an artist to this library.

        *artist* may be an Artist object or a plain name string.
        """

        params = self._get_params()
        # isinstance rather than type(...) == str: also accepts subclasses.
        if isinstance(artist, str):
            params["artist"] = artist
        else:
            params["artist"] = artist.get_name()

        self._request(self.ws_prefix + ".addArtist", False, params)

    def remove_artist(self, artist):
        """Remove an artist from this library.

        *artist* may be an Artist object or a plain name string.
        """

        params = self._get_params()
        if isinstance(artist, str):
            params["artist"] = artist
        else:
            params["artist"] = artist.get_name()

        self._request(self.ws_prefix + ".removeArtist", False, params)

    def add_track(self, track):
        """Add a track to this library."""

        params = self._get_params()
        # Fix: the library.addTrack web service identifies a track by
        # artist AND title; previously only the title was sent, which is
        # ambiguous.
        params["artist"] = track.get_artist().get_name()
        params["track"] = track.get_title()

        self._request(self.ws_prefix + ".addTrack", False, params)

    def get_albums(self, artist=None, limit=50, cacheable=True):
        """
        Returns a sequence of LibraryItem(Album, playcount, tagcount)
        objects.
        If no artist is specified, it will return all, sorted by decreasing
        play count.
        If limit==None it will return all (may take a while)
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getAlbums",
                cacheable,
                params):
            name = _extract(node, "name")
            # Second <name> element in the node is the artist's name.
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Album(artist, name, self.network), playcount, tagcount))

        return seq

    def get_artists(self, limit=50, cacheable=True):
        """
        Returns a sequence of LibraryItem(Artist, playcount, tagcount)
        objects (docstring previously said "Album objects").
        if limit==None it will return all (may take a while)
        """

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getArtists",
                cacheable):
            name = _extract(node, "name")

            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Artist(name, self.network), playcount, tagcount))

        return seq

    def get_tracks(self, artist=None, album=None, limit=50, cacheable=True):
        """
        Returns a sequence of LibraryItem(Track, playcount, tagcount)
        objects (docstring previously said "Album objects").
        If limit==None it will return all (may take a while)
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist
        if album:
            params["album"] = album

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getTracks",
                cacheable,
                params):
            name = _extract(node, "name")
            # Second <name> element in the node is the artist's name.
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Track(artist, name, self.network), playcount, tagcount))

        return seq

    def remove_scrobble(self, artist, title, timestamp):
        """Remove a scrobble from a user's Last.fm library. Parameters:
            artist (Required) : The artist that composed the track
            title (Required) : The name of the track
            timestamp (Required) : The unix timestamp of the scrobble
                that you wish to remove
        """

        params = self._get_params()
        params["artist"] = artist
        params["track"] = title
        params["timestamp"] = timestamp

        self._request(self.ws_prefix + ".removeScrobble", False, params)
2801
|
|
|
|
2802
|
|
|
|
2803
|
1 |
|
class Playlist(_BaseObject):
    """A Last.fm user playlist."""

    id = None    # playlist ID on the network
    user = None  # owning User object

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, playlist_id, network):
        _BaseObject.__init__(self, network, "playlist")

        # Accept either a User object or a plain username string.
        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        self.id = playlist_id

    @_string_output
    def __str__(self):
        return repr(self.user) + "'s playlist # " + repr(self.id)

    def _get_info_node(self):
        """
        Returns the node from user.getPlaylists where this playlist's info is.

        Note: implicitly returns None when no playlist with this ID is
        found; callers that _extract() from the result will then fail.
        """

        doc = self._request("user.getPlaylists", True)

        for node in doc.getElementsByTagName("playlist"):
            if _extract(node, "id") == str(self.get_id()):
                return node

    def _get_params(self):
        return {'user': self.user.get_name(), 'playlistID': self.get_id()}

    def get_id(self):
        """Returns the playlist ID."""

        return self.id

    def get_user(self):
        """Returns the owner user of this playlist."""

        return self.user

    def get_tracks(self):
        """Returns a list of the tracks on this user playlist."""

        # Delegates to an XSPF playlist addressed by lastfm:// URI.
        uri = _unicode('lastfm://playlist/%s') % self.get_id()

        return XSPF(uri, self.network).get_tracks()

    def add_track(self, track):
        """Adds a Track to this Playlist."""

        params = self._get_params()
        params['artist'] = track.get_artist().get_name()
        params['track'] = track.get_title()

        self._request('playlist.addTrack', False, params)

    def get_title(self):
        """Returns the title of this playlist."""

        return _extract(self._get_info_node(), "title")

    def get_creation_date(self):
        """Returns the creation date of this playlist."""

        return _extract(self._get_info_node(), "date")

    def get_size(self):
        """Returns the number of tracks in this playlist."""

        return _number(_extract(self._get_info_node(), "size"))

    def get_description(self):
        """Returns the description of this playlist."""

        return _extract(self._get_info_node(), "description")

    def get_duration(self):
        """Returns the duration of this playlist in milliseconds."""

        return _number(_extract(self._get_info_node(), "duration"))

    def is_streamable(self):
        """
        Returns True if the playlist is streamable.
        For a playlist to be streamable, it needs at least 45 tracks by 15
        different artists."""

        if _extract(self._get_info_node(), "streamable") == '1':
            return True
        else:
            return False

    def has_track(self, track):
        """Checks to see if track is already in the playlist.
        * track: Any Track object.
        """

        return track in self.get_tracks()

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        return _extract(self._get_info_node(), "image")[size]

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the playlist on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        english_url = _extract(self._get_info_node(), "url")
        # Keep only the trailing path component; the domain-specific URL
        # template supplies the rest.
        appendix = english_url[english_url.rfind("/") + 1:]

        return self.network._get_url(domain_name, "playlist") % {
            'appendix': appendix, "user": self.get_user().get_name()}
2943
|
|
|
|
2944
|
|
|
|
2945
|
1 |
|
class Tag(_BaseObject, _Chartable):
    """A Last.fm object tag."""

    name = None  # the tag's name

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'tag')
        _Chartable.__init__(self, 'tag')

        self.name = name

    def __repr__(self):
        return "pylast.Tag(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Tag names are compared case-insensitively.
        # Fix: comparing against a non-Tag (e.g. None) previously raised
        # AttributeError; now it is simply unequal, mirroring User.__eq__.
        if isinstance(other, Tag):
            return self.get_name().lower() == other.get_name().lower()
        else:
            return False

    def __ne__(self, other):
        # Exact negation of __eq__ so the two can never disagree.
        return not self.__eq__(other)

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the tag.

        If properly_capitalized is True, the name is refreshed from the
        tag.getInfo web service (and cached on self.name) first.
        """

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_similar(self):
        """Returns the tags similar to this one, ordered by similarity. """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        names = _extract_all(doc, 'name')
        for name in names:
            seq.append(Tag(name, self.network))

        return seq

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopAlbums', cacheable, params)

        return _extract_top_albums(doc, self.network)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks for this tag."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self.network)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the tag page on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "tag") % {'name': name}
3047
|
|
|
|
3048
|
|
|
|
3049
|
1 |
|
class Track(_Opus):
    """A Last.fm track."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        # All common artist/title/username handling lives in _Opus.
        super(Track, self).__init__(artist, title, network, "track", username)

    def get_correction(self):
        """Returns the corrected track name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_duration(self):
        """Returns the track duration."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _number(_extract(doc, "duration"))

    def get_userloved(self):
        """Whether the user loved this track.

        Returns None when no username was given at construction; otherwise
        a bool derived from the "userloved" field of track.getInfo.
        """

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        loved = _number(_extract(doc, "userloved"))
        return bool(loved)

    def is_streamable(self):
        """Returns True if the track is available at Last.fm."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        return _extract(doc, "streamable") == "1"

    def is_fulltrack_available(self):
        """Returns True if the fulltrack is available for streaming."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        # The flag is an attribute on the <streamable> element.
        return doc.getElementsByTagName(
            "streamable")[0].getAttribute("fulltrack") == "1"

    def get_album(self):
        """Returns the album object of this track.

        Returns None when track.getInfo carries no album information.
        """

        doc = self._request(self.ws_prefix + ".getInfo", True)

        albums = doc.getElementsByTagName("album")

        if len(albums) == 0:
            return

        node = doc.getElementsByTagName("album")[0]
        return Album(
            _extract(node, "artist"), _extract(node, "title"), self.network)

    def love(self):
        """Adds the track to the user's loved tracks. """

        self._request(self.ws_prefix + '.love')

    def unlove(self):
        """Remove the track to the user's loved tracks. """

        self._request(self.ws_prefix + '.unlove')

    def ban(self):
        """Ban this track from ever playing on the radio. """

        self._request(self.ws_prefix + '.ban')

    def get_similar(self):
        """
        Returns similar tracks for this track on the network,
        based on listening data.
        """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        for node in doc.getElementsByTagName(self.ws_prefix):
            title = _extract(node, 'name')
            # Second <name> element in the node is the artist's name.
            artist = _extract(node, 'name', 1)
            match = _number(_extract(node, "match"))

            seq.append(SimilarItem(Track(artist, title, self.network), match))

        return seq

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album or track page on the network.
        # Parameters:
        * domain_name str: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        artist = _url_safe(self.get_artist().get_name())
        title = _url_safe(self.get_title())

        return self.network._get_url(
            domain_name, self.ws_prefix) % {
                'artist': artist, 'title': title}
3167
|
|
|
|
3168
|
|
|
|
3169
|
1 |
|
class Group(_BaseObject, _Chartable):
    """A Last.fm group."""

    name = None  # the group's name

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'group')
        _Chartable.__init__(self, 'group')

        self.name = name

    def __repr__(self):
        return "pylast.Group(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Group names are compared case-insensitively.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Fix: previously compared case-sensitively while __eq__ compared
        # case-insensitively, so two groups differing only in case were
        # both == and !=. __ne__ must be the exact negation of __eq__.
        return not self.__eq__(other)

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self):
        """Returns the group name. """
        return self.name

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the group page on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "group") % {'name': name}

    def get_members(self, limit=50, cacheable=False):
        """
        Returns a sequence of User objects
        if limit==None it will return all
        """

        nodes = _collect_nodes(
            limit, self, self.ws_prefix + ".getMembers", cacheable)

        users = []

        for node in nodes:
            users.append(User(_extract(node, "name"), self.network))

        return users
3238
|
|
|
|
3239
|
|
|
|
3240
|
1 |
|
class XSPF(_BaseObject):
    """A Last.fm XSPF playlist."""
    # Fix: the docstring previously opened with a single double-quote
    # ("A Last.fm XSPF playlist.""") and only parsed via accidental
    # implicit string concatenation.

    uri = None  # the lastfm:// playlist URI

    __hash__ = _BaseObject.__hash__

    def __init__(self, uri, network):
        _BaseObject.__init__(self, network, None)

        self.uri = uri

    def _get_params(self):
        return {'playlistURL': self.get_uri()}

    @_string_output
    def __str__(self):
        return self.get_uri()

    def __eq__(self, other):
        return self.get_uri() == other.get_uri()

    def __ne__(self, other):
        return self.get_uri() != other.get_uri()

    def get_uri(self):
        """Returns the Last.fm playlist URI. """

        return self.uri

    def get_tracks(self):
        """Returns the tracks on this playlist."""

        doc = self._request('playlist.fetch', True)

        seq = []
        for node in doc.getElementsByTagName('track'):
            title = _extract(node, 'title')
            # XSPF uses <creator> for the artist name.
            artist = _extract(node, 'creator')

            seq.append(Track(artist, title, self.network))

        return seq
3283
|
|
|
|
3284
|
|
|
|
3285
|
1 |
|
class User(_BaseObject, _Chartable): |
3286
|
|
|
"""A Last.fm user.""" |
3287
|
|
|
|
3288
|
1 |
|
name = None |
3289
|
|
|
|
3290
|
1 |
|
__hash__ = _BaseObject.__hash__ |
3291
|
|
|
|
3292
|
1 |
|
def __init__(self, user_name, network):
    _BaseObject.__init__(self, network, 'user')
    _Chartable.__init__(self, 'user')

    self.name = user_name

    # Pagination cursors for the corresponding get_* iterators.
    self._past_events_index = 0
    self._recommended_events_index = 0
    self._recommended_artists_index = 0
3301
|
|
|
|
3302
|
1 |
|
def __repr__(self):
    # Constructor-style, unambiguous representation.
    return "pylast.User(%s, %s)" % (repr(self.name), repr(self.network))
3304
|
|
|
|
3305
|
1 |
|
@_string_output
def __str__(self):
    # The user is represented by their name alone.
    return self.get_name()
|
|
|
|
3308
|
|
|
|
3309
|
1 |
|
def __eq__(self, another):
    # Users are equal iff both are User instances with the same name
    # (case-sensitive); any other type compares unequal.
    if isinstance(another, User):
        return self.get_name() == another.get_name()
    else:
        return False
3314
|
|
|
|
3315
|
1 |
|
def __ne__(self, another):
    """Exact negation of __eq__: unequal unless *another* is a User
    with the same (case-sensitive) name."""
    return not self.__eq__(another)
3320
|
|
|
|
3321
|
1 |
|
def _get_params(self):
    # Default web-service parameters: identify this user by name.
    params = {self.ws_prefix: self.get_name()}
    return params
3323
|
|
|
|
3324
|
1 |
|
def get_name(self, properly_capitalized=False):
    """Returns the user name.

    If properly_capitalized is True, the name is first refreshed from
    the user.getInfo web service (and cached on self.name).
    """

    if properly_capitalized:
        self.name = _extract(
            self._request(self.ws_prefix + ".getInfo", True), "name")

    return self.name
3332
|
|
|
|
3333
|
1 |
|
def get_upcoming_events(self):
    """Returns all the upcoming events for this user."""

    doc = self._request(self.ws_prefix + '.getEvents', True)

    return _extract_events_from_doc(doc, self.network)
3339
|
|
|
|
3340
|
1 |
|
def get_artist_tracks(self, artist, cacheable=False):
    """
    Get a list of tracks by a given artist scrobbled by this user,
    including scrobble time.

    Returns a list of PlayedTrack(Track, album, date, timestamp) tuples.
    """
    # Not implemented:
    # "Can be limited to specific timeranges, defaults to all time."

    params = self._get_params()
    params['artist'] = artist

    seq = []
    for track in _collect_nodes(
            None,  # no limit: fetch the user's full history for this artist
            self,
            self.ws_prefix + ".getArtistTracks",
            cacheable,
            params):
        title = _extract(track, "name")
        artist = _extract(track, "artist")
        date = _extract(track, "date")
        album = _extract(track, "album")
        # Unix timestamp lives in the "uts" attribute of the <date> node.
        timestamp = track.getElementsByTagName(
            "date")[0].getAttribute("uts")

        seq.append(PlayedTrack(
            Track(artist, title, self.network), album, date, timestamp))

    return seq
3369
|
|
|
|
3370
|
1 |
|
def get_friends(self, limit=50, cacheable=False):
    """Returns a list of the user's friends. """

    seq = []
    for node in _collect_nodes(
            limit,
            self,
            self.ws_prefix + ".getFriends",
            cacheable):
        seq.append(User(_extract(node, "name"), self.network))

    return seq
3382
|
|
|
|
3383
|
1 |
|
def get_loved_tracks(self, limit=50, cacheable=True):
    """
    Returns this user's loved track as a sequence of LovedTrack objects in
    reverse order of their timestamp, all the way back to the first track.

    If limit==None, it will try to pull all the available data.

    This method uses caching. Enable caching only if you're pulling a
    large amount of data.

    Use extract_items() with the return of this function to
    get only a sequence of Track objects with no playback dates.
    """

    params = self._get_params()
    if limit:
        params['limit'] = limit

    seq = []
    for track in _collect_nodes(
            limit,
            self,
            self.ws_prefix + ".getLovedTracks",
            cacheable,
            params):
        title = _extract(track, "name")
        # Second <name> element in the node is the artist's name.
        artist = _extract(track, "name", 1)
        date = _extract(track, "date")
        # Unix timestamp lives in the "uts" attribute of the <date> node.
        timestamp = track.getElementsByTagName(
            "date")[0].getAttribute("uts")

        seq.append(LovedTrack(
            Track(artist, title, self.network), date, timestamp))

    return seq
3418
|
|
|
|
3419
|
1 |
|
def get_neighbours(self, limit=50, cacheable=True):
    """Returns a list of the user's neighbours as User objects."""
    params = self._get_params()
    if limit:
        params['limit'] = limit

    doc = self._request(
        self.ws_prefix + '.getNeighbours', cacheable, params)

    return [User(name, self.network) for name in _extract_all(doc, 'name')]
3436
|
|
|
|
3437
|
1 |
|
def get_past_events(self, limit=50, cacheable=False):
    """
    Returns a sequence of Event objects.
    If limit==None it will return all.
    """
    return [
        Event(_extract(node, "id"), self.network)
        for node in _collect_nodes(
            limit, self, self.ws_prefix + ".getPastEvents", cacheable)]
3452
|
|
|
|
3453
|
1 |
|
def get_playlists(self):
    """Returns a list of Playlists that this user owns."""
    doc = self._request(self.ws_prefix + ".getPlaylists", True)
    return [
        Playlist(self.get_name(), playlist_id, self.network)
        for playlist_id in _extract_all(doc, "id")]
3464
|
|
|
|
3465
|
1 |
|
def get_now_playing(self):
    """
    Returns the currently playing track, or None if nothing is playing.
    """
    params = self._get_params()
    params['limit'] = '1'

    doc = self._request(self.ws_prefix + '.getRecentTracks', False, params)
    tracks = doc.getElementsByTagName('track')

    if not tracks:
        return None

    first = tracks[0]
    # Only a track flagged "nowplaying" counts as currently playing.
    if not first.hasAttribute('nowplaying'):
        return None

    return Track(
        _extract(first, 'artist'),
        _extract(first, 'name'),
        self.network,
        self.name)
3489
|
|
|
|
3490
|
1 |
|
def get_recent_tracks(self, limit=10, cacheable=True,
                      time_from=None, time_to=None):
    """
    Returns this user's played track as a sequence of PlayedTrack objects
    in reverse order of playtime, all the way back to the first track.

    Parameters:
    limit : If None, it will try to pull all the available data.
    from (Optional) : Beginning timestamp of a range - only display
    scrobbles after this time, in UNIX timestamp format (integer
    number of seconds since 00:00:00, January 1st 1970 UTC). This
    must be in the UTC time zone.
    to (Optional) : End timestamp of a range - only display scrobbles
    before this time, in UNIX timestamp format (integer number of
    seconds since 00:00:00, January 1st 1970 UTC). This must be in
    the UTC time zone.

    This method uses caching. Enable caching only if you're pulling a
    large amount of data.

    Use extract_items() with the return of this function to
    get only a sequence of Track objects with no playback dates.
    """

    # Only include parameters the caller actually supplied; falsy values
    # (None, 0) are omitted, which matches the web-service defaults.
    params = self._get_params()
    if limit:
        params['limit'] = limit
    if time_from:
        params['from'] = time_from
    if time_to:
        params['to'] = time_to

    seq = []
    for track in _collect_nodes(
            limit,
            self,
            self.ws_prefix + ".getRecentTracks",
            cacheable,
            params):

        # The service prepends the currently playing track to the list;
        # skip it so only completed scrobbles are returned.
        if track.hasAttribute('nowplaying'):
            continue  # to prevent the now playing track from sneaking in

        title = _extract(track, "name")
        artist = _extract(track, "artist")
        date = _extract(track, "date")
        album = _extract(track, "album")
        # The <date> element also exposes the raw UNIX time via "uts".
        timestamp = track.getElementsByTagName(
            "date")[0].getAttribute("uts")

        seq.append(PlayedTrack(
            Track(artist, title, self.network), album, date, timestamp))

    return seq
3544
|
|
|
|
3545
|
1 |
|
def get_id(self):
    """Returns the user ID."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _extract(info_doc, "id")
3551
|
|
|
|
3552
|
1 |
|
def get_language(self):
    """Returns the language code of the language used by the user."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _extract(info_doc, "lang")
3558
|
|
|
|
3559
|
1 |
|
def get_country(self):
    """Returns the user's Country object, or None if unset."""
    doc = self._request(self.ws_prefix + ".getInfo", True)
    country_name = _extract(doc, "country")
    if country_name is None:
        return None
    return Country(country_name, self.network)
3570
|
|
|
|
3571
|
1 |
|
def get_age(self):
    """Returns the user's age."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _number(_extract(info_doc, "age"))
3577
|
|
|
|
3578
|
1 |
|
def get_gender(self):
    """Returns the user's gender: USER_MALE, USER_FEMALE, or None."""
    doc = self._request(self.ws_prefix + ".getInfo", True)
    gender = _extract(doc, "gender")
    # Map the service's single-letter codes; anything else yields None.
    return {'m': USER_MALE, 'f': USER_FEMALE}.get(gender)
3591
|
|
|
|
3592
|
1 |
|
def is_subscriber(self):
    """Returns whether the user is a subscriber or not. True or False."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _extract(info_doc, "subscriber") == "1"
3598
|
|
|
|
3599
|
1 |
|
def get_playcount(self):
    """Returns the user's playcount so far."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _number(_extract(info_doc, "playcount"))
3605
|
|
|
|
3606
|
1 |
|
def get_registered(self):
    """Returns the user's registration date."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _extract(info_doc, "registered")
3612
|
|
|
|
3613
|
1 |
|
def get_unixtime_registered(self):
    """Returns the user's registration date as a UNIX timestamp."""
    doc = self._request(self.ws_prefix + ".getInfo", True)
    registered_node = doc.getElementsByTagName("registered")[0]
    return registered_node.getAttribute("unixtime")
3620
|
|
|
|
3621
|
1 |
|
def get_tagged_albums(self, tag, limit=None, cacheable=True):
    """Returns the albums tagged by a user."""
    params = self._get_params()
    params.update({'tag': tag, 'taggingtype': 'album'})
    if limit:
        params['limit'] = limit
    doc = self._request(
        self.ws_prefix + '.getpersonaltags', cacheable, params)
    return _extract_albums(doc, self.network)
3632
|
|
|
|
3633
|
1 |
|
def get_tagged_artists(self, tag, limit=None, cacheable=True):
    """Returns the artists tagged by a user.

    * tag: the tag to filter by.
    * limit: maximum number of artists to return (None for the
      service default).
    * cacheable: whether the response may be cached. New parameter,
      added for consistency with get_tagged_albums/get_tagged_tracks;
      defaults to True, matching the previously hard-coded value, so
      existing callers are unaffected.
    """
    params = self._get_params()
    params['tag'] = tag
    params['taggingtype'] = 'artist'
    if limit:
        params["limit"] = limit
    doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable,
                        params)
    return _extract_artists(doc, self.network)
3643
|
|
|
|
3644
|
1 |
|
def get_tagged_tracks(self, tag, limit=None, cacheable=True):
    """Returns the tracks tagged by a user."""
    params = self._get_params()
    params.update({'tag': tag, 'taggingtype': 'track'})
    if limit:
        params['limit'] = limit
    doc = self._request(
        self.ws_prefix + '.getpersonaltags', cacheable, params)
    return _extract_tracks(doc, self.network)
3655
|
|
|
|
3656
|
1 |
|
def get_top_albums(
        self, period=PERIOD_OVERALL, limit=None, cacheable=True):
    """Returns the top albums played by a user.
    * period: The period of time. Possible values:
      o PERIOD_OVERALL
      o PERIOD_7DAYS
      o PERIOD_1MONTH
      o PERIOD_3MONTHS
      o PERIOD_6MONTHS
      o PERIOD_12MONTHS
    """
    params = self._get_params()
    params.update({'period': period})
    if limit:
        params['limit'] = limit

    doc = self._request(
        self.ws_prefix + '.getTopAlbums', cacheable, params)
    return _extract_top_albums(doc, self.network)
3677
|
|
|
|
3678
|
1 |
|
def get_top_artists(self, period=PERIOD_OVERALL, limit=None,
                    cacheable=True):
    """Returns the top artists played by a user.
    * period: The period of time. Possible values:
      o PERIOD_OVERALL
      o PERIOD_7DAYS
      o PERIOD_1MONTH
      o PERIOD_3MONTHS
      o PERIOD_6MONTHS
      o PERIOD_12MONTHS
    * limit: maximum number of artists to return.
    * cacheable: whether the response may be cached. New parameter,
      added for consistency with get_top_albums/get_top_tracks;
      defaults to True, matching the previously hard-coded value, so
      existing callers are unaffected.
    """
    params = self._get_params()
    params['period'] = period
    if limit:
        params["limit"] = limit

    doc = self._request(self.ws_prefix + '.getTopArtists', cacheable,
                        params)

    return _extract_top_artists(doc, self.network)
3697
|
|
|
|
3698
|
1 |
|
def get_top_tags(self, limit=None, cacheable=True):
    """
    Returns a sequence of the top tags used by this user with their counts
    as TopItem objects.
    * limit: The limit of how many tags to return.
    * cacheable: Whether to cache results.
    """
    params = self._get_params()
    if limit:
        params["limit"] = limit

    doc = self._request(self.ws_prefix + ".getTopTags", cacheable, params)

    return [
        TopItem(
            Tag(_extract(node, "name"), self.network),
            _extract(node, "count"))
        for node in doc.getElementsByTagName("tag")]
3719
|
|
|
|
3720
|
1 |
|
def get_top_tracks(
        self, period=PERIOD_OVERALL, limit=None, cacheable=True):
    """Returns the top tracks played by a user.
    * period: The period of time. Possible values:
      o PERIOD_OVERALL
      o PERIOD_7DAYS
      o PERIOD_1MONTH
      o PERIOD_3MONTHS
      o PERIOD_6MONTHS
      o PERIOD_12MONTHS
    """
    params = self._get_params()
    params.update({'period': period})
    if limit:
        params['limit'] = limit

    return self._get_things(
        "getTopTracks", "track", Track, params, cacheable)
3739
|
|
|
|
3740
|
1 |
|
def compare_with_user(self, user, shared_artists_limit=None):
    """
    Compare this user with another Last.fm user.
    Returns a sequence:
    (tasteometer_score, (shared_artist1, shared_artist2, ...))
    user: A User object or a username string/unicode object.
    """

    # Normalize a User object to its plain username string.
    if isinstance(user, User):
        user = user.get_name()

    params = self._get_params()
    if shared_artists_limit:
        params['limit'] = shared_artists_limit
    # The tasteometer endpoint compares two arbitrary entities; both
    # sides here are users.
    params['type1'] = 'user'
    params['type2'] = 'user'
    params['value1'] = self.get_name()
    params['value2'] = user

    doc = self._request('tasteometer.compare', False, params)

    score = _extract(doc, 'score')

    # Shared artists are listed under the first <artists> element.
    artists = doc.getElementsByTagName('artists')[0]
    shared_artists_names = _extract_all(artists, 'name')

    shared_artists_seq = []

    for name in shared_artists_names:
        shared_artists_seq.append(Artist(name, self.network))

    return (score, shared_artists_seq)
3772
|
|
|
|
3773
|
1 |
|
def get_image(self):
    """Returns the user's avatar."""
    info_doc = self._request(self.ws_prefix + ".getInfo", True)
    return _extract(info_doc, "image")
3779
|
|
|
|
3780
|
1 |
|
def get_url(self, domain_name=DOMAIN_ENGLISH):
    """Returns the url of the user page on the network.
    * domain_name: The network's language domain. Possible values:
      o DOMAIN_ENGLISH
      o DOMAIN_GERMAN
      o DOMAIN_SPANISH
      o DOMAIN_FRENCH
      o DOMAIN_ITALIAN
      o DOMAIN_POLISH
      o DOMAIN_PORTUGUESE
      o DOMAIN_SWEDISH
      o DOMAIN_TURKISH
      o DOMAIN_RUSSIAN
      o DOMAIN_JAPANESE
      o DOMAIN_CHINESE
    """
    safe_name = _url_safe(self.get_name())
    return self.network._get_url(domain_name, "user") % {'name': safe_name}
3800
|
|
|
|
3801
|
1 |
|
def get_library(self):
    """Returns the associated Library object."""
    return Library(self, self.network)
3805
|
|
|
|
3806
|
1 |
|
def shout(self, message):
    """
    Post a shout
    """
    params = self._get_params()
    params["message"] = message
    self._request(self.ws_prefix + ".Shout", False, params)
3815
|
|
|
|
3816
|
|
|
|
3817
|
1 |
|
class AuthenticatedUser(User):
    """The user owning the authenticated session."""

    def __init__(self, network):
        User.__init__(self, "", network)

    def _get_params(self):
        return {"user": self.get_name()}

    def get_name(self):
        """Returns the name of the authenticated user."""
        # An empty "user" makes the service answer for the session owner.
        doc = self._request("user.getInfo", True, {"user": ""})  # hack
        self.name = _extract(doc, "name")
        return self.name

    def get_recommended_events(self, limit=50, cacheable=False):
        """
        Returns a sequence of Event objects.
        If limit==None it will return all.
        """
        return [
            Event(_extract(node, "id"), self.network)
            for node in _collect_nodes(
                limit, self, "user.getRecommendedEvents", cacheable)]

    def get_recommended_artists(self, limit=50, cacheable=False):
        """
        Returns a sequence of Artist objects.
        If limit==None it will return all.
        """
        return [
            Artist(_extract(node, "name"), self.network)
            for node in _collect_nodes(
                limit, self, "user.getRecommendedArtists", cacheable)]
3857
|
|
|
|
3858
|
|
|
|
3859
|
1 |
|
class _Search(_BaseObject):
    """An abstract class. Use one of its derivatives."""

    def __init__(self, ws_prefix, search_terms, network):
        _BaseObject.__init__(self, network, ws_prefix)

        self._ws_prefix = ws_prefix
        self.search_terms = search_terms
        self._last_page_index = 0

    def _get_params(self):
        # Shallow-copy the search terms so callers may add paging keys
        # without mutating the stored dict.
        return dict(self.search_terms)

    def get_total_result_count(self):
        """Returns the total count of all the results."""
        doc = self._request(self._ws_prefix + ".search", True)
        return _extract(doc, "opensearch:totalResults")

    def _retrieve_page(self, page_index):
        """Returns the node of matches to be processed"""
        params = self._get_params()
        params["page"] = str(page_index)
        doc = self._request(self._ws_prefix + ".search", True, params)
        return doc.getElementsByTagName(self._ws_prefix + "matches")[0]

    def _retrieve_next_page(self):
        self._last_page_index += 1
        return self._retrieve_page(self._last_page_index)
3897
|
|
|
|
3898
|
|
|
|
3899
|
1 |
|
class AlbumSearch(_Search):
    """Search for an album by name."""

    def __init__(self, album_name, network):
        _Search.__init__(self, "album", {"album": album_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Album objects."""
        matches = self._retrieve_next_page()
        return [
            Album(
                _extract(node, "artist"),
                _extract(node, "name"),
                self.network)
            for node in matches.getElementsByTagName("album")]
3919
|
|
|
|
3920
|
|
|
|
3921
|
1 |
|
class ArtistSearch(_Search):
    """Search for an artist by artist name."""

    def __init__(self, artist_name, network):
        _Search.__init__(self, "artist", {"artist": artist_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Artist objects."""
        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("artist"):
            found = Artist(_extract(node, "name"), self.network)
            # Search results also carry a listener count; attach it.
            found.listener_count = _number(_extract(node, "listeners"))
            results.append(found)
        return results
3939
|
|
|
|
3940
|
|
|
|
3941
|
1 |
|
class TagSearch(_Search):
    """Search for a tag by tag name."""

    def __init__(self, tag_name, network):
        _Search.__init__(self, "tag", {"tag": tag_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Tag objects."""
        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("tag"):
            found = Tag(_extract(node, "name"), self.network)
            # Search results also carry a usage count; attach it.
            found.tag_count = _number(_extract(node, "count"))
            results.append(found)
        return results
3960
|
|
|
|
3961
|
|
|
|
3962
|
1 |
|
class TrackSearch(_Search):
    """
    Search for a track by track title. If you don't want to narrow the results
    down by specifying the artist name, set it to empty string.
    """

    def __init__(self, artist_name, track_title, network):
        _Search.__init__(
            self,
            "track",
            {"track": track_title, "artist": artist_name},
            network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Track objects."""
        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("track"):
            found = Track(
                _extract(node, "artist"),
                _extract(node, "name"),
                self.network)
            # Search results also carry a listener count; attach it.
            found.listener_count = _number(_extract(node, "listeners"))
            results.append(found)
        return results
3991
|
|
|
|
3992
|
|
|
|
3993
|
1 |
|
class VenueSearch(_Search):
    """
    Search for a venue by its name. If you don't want to narrow the results
    down by specifying a country, set it to empty string.
    """

    def __init__(self, venue_name, country_name, network):
        _Search.__init__(
            self,
            "venue",
            {"venue": venue_name, "country": country_name},
            network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Venue objects."""
        matches = self._retrieve_next_page()
        return [
            Venue(_extract(node, "id"), self.network)
            for node in matches.getElementsByTagName("venue")]
4017
|
|
|
|
4018
|
|
|
|
4019
|
1 |
|
class Venue(_BaseObject):
    """A venue where events are held."""

    # TODO: waiting for a venue.getInfo web service to use.
    # TODO: As an intermediate use case, can pass the venue DOM element when
    # using Event.get_venue() to populate the venue info, if the venue.getInfo
    # API call becomes available this workaround should be removed

    # Class-level defaults; each is overwritten per instance in __init__
    # (name/url/location only when a DOM element is supplied).
    id = None
    info = None
    name = None
    location = None
    url = None

    # Defining __eq__ below would otherwise set __hash__ to None;
    # restore the base class's hash explicitly.
    __hash__ = _BaseObject.__hash__

    def __init__(self, netword_id, network, venue_element=None):
        # NOTE(review): "netword_id" looks like a typo for "network_id",
        # but renaming it would break callers passing it by keyword.
        _BaseObject.__init__(self, network, "venue")

        self.id = _number(netword_id)
        if venue_element is not None:
            # Workaround described in the class TODO: populate details
            # from a venue DOM element instead of a getInfo call.
            self.info = _extract_element_tree(venue_element)
            self.name = self.info.get('name')
            self.url = self.info.get('url')
            self.location = self.info.get('location')

    def __repr__(self):
        return "pylast.Venue(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Venue #" + str(self.id)

    def __eq__(self, other):
        # NOTE(review): assumes `other` is also a Venue; comparison with an
        # unrelated type raises AttributeError instead of returning
        # NotImplemented.
        return self.get_id() == other.get_id()

    def _get_params(self):
        return {self.ws_prefix: self.get_id()}

    def get_id(self):
        """Returns the id of the venue."""

        return self.id

    def get_name(self):
        """Returns the name of the venue."""

        return self.name

    def get_url(self):
        """Returns the URL of the venue page."""

        return self.url

    def get_location(self):
        """Returns the location of the venue (dictionary)."""

        return self.location

    def get_upcoming_events(self):
        """Returns the upcoming events in this venue."""

        doc = self._request(self.ws_prefix + ".getEvents", True)

        return _extract_events_from_doc(doc, self.network)

    def get_past_events(self):
        """Returns the past events held in this venue."""

        # NOTE(review): this issues the exact same request as
        # get_upcoming_events; the endpoint used does not distinguish
        # past from upcoming events.
        doc = self._request(self.ws_prefix + ".getEvents", True)

        return _extract_events_from_doc(doc, self.network)
4091
|
|
|
|
4092
|
|
|
|
4093
|
1 |
|
def md5(text):
    """Returns the md5 hash of a string."""
    encoded = _unicode(text).encode("utf-8")
    return hashlib.md5(encoded).hexdigest()
4100
|
|
|
|
4101
|
|
|
|
4102
|
1 |
|
def _unicode(text):
    """Coerce `text` to the unicode text type (decoding bytes as UTF-8)."""
    if isinstance(text, six.text_type):
        return text
    if isinstance(text, six.binary_type):
        return six.text_type(text, "utf-8")
    return six.text_type(text)
4109
|
|
|
|
4110
|
|
|
|
4111
|
1 |
|
def _string(string):
    """For Python2 routines that can only process str type."""
    if isinstance(string, str):
        return string
    result = six.text_type(string)
    # On Python 2, `str` is the byte type, so encode back down.
    if sys.version_info[0] == 2:
        result = result.encode("utf-8")
    return result
4119
|
|
|
|
4120
|
|
|
|
4121
|
1 |
|
def cleanup_nodes(doc):
    """
    Remove text nodes containing only whitespace from the document
    element's children, in place, and return the document.
    """
    # Iterate over a snapshot: `childNodes` is a live list, so removing a
    # node while walking it shifts the remaining nodes left and skips the
    # node that immediately follows each removal (e.g. two consecutive
    # whitespace text nodes left one of them behind).
    for node in list(doc.documentElement.childNodes):
        if node.nodeType == Node.TEXT_NODE and node.nodeValue.isspace():
            doc.documentElement.removeChild(node)
    return doc
4129
|
|
|
|
4130
|
|
|
|
4131
|
1 |
|
def _collect_nodes(limit, sender, method_name, cacheable, params=None):
    """
    Returns a sequence of dom.Node objects about as close to limit as possible
    """

    # NOTE(review): a caller-supplied empty dict is treated the same as
    # None here (falsy) and replaced by sender's default params.
    if not params:
        params = sender._get_params()

    nodes = []
    page = 1  # the web service's pages are 1-based
    end_of_pages = False

    # limit of None/0 means "no limit": keep paging until the last page.
    while not end_of_pages and (not limit or (limit and len(nodes) < limit)):
        params["page"] = str(page)
        doc = sender._request(method_name, cacheable, params)
        # Strip whitespace-only text nodes so childNodes[0] below is the
        # real payload element.
        doc = cleanup_nodes(doc)

        main = doc.documentElement.childNodes[0]

        # Different endpoints disagree on the attribute's capitalization.
        if main.hasAttribute("totalPages"):
            total_pages = _number(main.getAttribute("totalPages"))
        elif main.hasAttribute("totalpages"):
            total_pages = _number(main.getAttribute("totalpages"))
        else:
            raise Exception("No total pages attribute")

        # Collect element children, stopping once the limit is reached.
        for node in main.childNodes:
            if not node.nodeType == xml.dom.Node.TEXT_NODE and (
                    not limit or (len(nodes) < limit)):
                nodes.append(node)

        if page >= total_pages:
            end_of_pages = True

        page += 1

    return nodes
4168
|
|
|
|
4169
|
|
|
|
4170
|
1 |
|
def _extract(node, name, index=0):
    """Extracts a value from the xml string"""
    matches = node.getElementsByTagName(name)
    if not len(matches):
        return None
    child = matches[index].firstChild
    if child:
        return _unescape_htmlentity(child.data.strip())
    return None
4180
|
|
|
|
4181
|
|
|
|
4182
|
1 |
|
def _extract_element_tree(node):
    """Extract an element tree into a multi-level dictionary

    NB: If any elements have text nodes as well as nested
    elements this will ignore the text nodes"""

    def _recurse_build_tree(rootNode, targetDict):
        """Recursively build a multi-level dict"""

        def _has_child_elements(rootNode):
            """Check if an element has any nested (child) elements"""

            for node in rootNode.childNodes:
                if node.nodeType == node.ELEMENT_NODE:
                    return True
            return False

        for node in rootNode.childNodes:
            if node.nodeType == node.ELEMENT_NODE:
                if _has_child_elements(node):
                    # Container element: recurse into a nested dict.
                    targetDict[node.tagName] = {}
                    _recurse_build_tree(node, targetDict[node.tagName])
                else:
                    # Leaf element: store its (unescaped) text, or None
                    # when the element is empty.
                    val = None if node.firstChild is None else \
                        _unescape_htmlentity(node.firstChild.data.strip())
                    targetDict[node.tagName] = val
        return targetDict

    return _recurse_build_tree(node, {})
4211
|
|
|
|
4212
|
|
|
|
4213
|
1 |
|
def _extract_all(node, name, limit_count=None):
    """Extracts all the values from the xml string. returning a list."""
    values = []
    match_count = len(node.getElementsByTagName(name))
    for index in range(match_count):
        # Stop early once the optional cap has been reached.
        if len(values) == limit_count:
            break
        values.append(_extract(node, name, index))
    return values
4225
|
|
|
|
4226
|
|
|
|
4227
|
1 |
|
def _extract_top_artists(doc, network):
    # TODO Maybe include the _request here too?
    return [
        TopItem(
            Artist(_extract(node, "name"), network),
            _extract(node, "playcount"))
        for node in doc.getElementsByTagName("artist")]
4237
|
|
|
|
4238
|
|
|
|
4239
|
1 |
|
def _extract_top_albums(doc, network):
    # TODO Maybe include the _request here too?
    results = []
    for node in doc.getElementsByTagName("album"):
        # The first <name> is the album's, the second the artist's.
        album_name = _extract(node, "name")
        artist_name = _extract(node, "name", 1)
        playcount = _extract(node, "playcount")
        results.append(
            TopItem(Album(artist_name, album_name, network), playcount))
    return results
4250
|
|
|
|
4251
|
|
|
|
4252
|
1 |
|
def _extract_artists(doc, network):
    return [
        Artist(_extract(node, "name"), network)
        for node in doc.getElementsByTagName("artist")]
4257
|
|
|
|
4258
|
|
|
|
4259
|
1 |
|
def _extract_albums(doc, network):
    # The first <name> is the album's, the second the artist's.
    return [
        Album(_extract(node, "name", 1), _extract(node, "name"), network)
        for node in doc.getElementsByTagName("album")]
4266
|
|
|
|
4267
|
|
|
|
4268
|
1 |
|
def _extract_tracks(doc, network):
    # The first <name> is the track's, the second the artist's.
    return [
        Track(_extract(node, "name", 1), _extract(node, "name"), network)
        for node in doc.getElementsByTagName("track")]
4275
|
|
|
|
4276
|
|
|
|
4277
|
1 |
|
def _extract_events_from_doc(doc, network):
    return [
        Event(_extract(node, "id"), network)
        for node in doc.getElementsByTagName("event")]
4282
|
|
|
|
4283
|
|
|
|
4284
|
1 |
|
def _url_safe(text):
    """Does all kinds of tricks on a text to make it safe to use in a url."""
    quoted_once = url_quote_plus(_string(text))
    return url_quote_plus(quoted_once).lower()
4288
|
|
|
|
4289
|
|
|
|
4290
|
1 |
|
def _number(string): |
4291
|
|
|
""" |
4292
|
|
|
Extracts an int from a string. |
4293
|
|
|
Returns a 0 if None or an empty string was passed. |
4294
|
|
|
""" |
4295
|
|
|
|
4296
|
1 |
|
if not string: |
4297
|
1 |
|
return 0 |
4298
|
1 |
|
elif string == "": |
4299
|
|
|
return 0 |
4300
|
|
|
else: |
4301
|
1 |
|
try: |
4302
|
1 |
|
return int(string) |
4303
|
1 |
|
except ValueError: |
4304
|
1 |
|
return float(string) |
4305
|
|
|
|
4306
|
|
|
|
4307
|
1 |
|
def _unescape_htmlentity(string):
    """Replaces every known named HTML entity (e.g. "&amp;") in the given
    string with the corresponding unicode character."""
    for name, codepoint in htmlentitydefs.name2codepoint.items():
        string = string.replace("&%s;" % name, unichr(codepoint))
    return string
4316
|
|
|
|
4317
|
|
|
|
4318
|
1 |
|
def extract_items(topitems_or_libraryitems):
    """
    Extracts a sequence of items from a sequence of TopItem or
    LibraryItem objects.
    """
    return [wrapper.item for wrapper in topitems_or_libraryitems]
4329
|
|
|
|
4330
|
|
|
|
4331
|
1 |
|
class ScrobblingError(Exception):
    """Base exception for errors reported by the scrobbling submission
    protocol."""

    def __init__(self, message):
        super(ScrobblingError, self).__init__()
        # Human-readable reason, returned by __str__.
        self.message = message

    @_string_output
    def __str__(self):
        return self.message
4339
|
|
|
|
4340
|
|
|
|
4341
|
1 |
|
class BannedClientError(ScrobblingError):
    """Raised when the server reports this client version as banned."""

    def __init__(self):
        super(BannedClientError, self).__init__(
            "This version of the client has been banned")
4345
|
|
|
|
4346
|
|
|
|
4347
|
1 |
|
class BadAuthenticationError(ScrobblingError):
    """Raised when the server rejects the supplied authentication token."""

    def __init__(self):
        super(BadAuthenticationError, self).__init__(
            "Bad authentication token")
4350
|
|
|
|
4351
|
|
|
|
4352
|
1 |
|
class BadTimeError(ScrobblingError):
    """Raised when the submitted timestamp is too far from server time."""

    def __init__(self):
        super(BadTimeError, self).__init__(
            "Time provided is not close enough to current time")
4356
|
|
|
|
4357
|
|
|
|
4358
|
1 |
|
class BadSessionError(ScrobblingError):
    """Raised when the session id is invalid; a new handshake is needed."""

    def __init__(self):
        super(BadSessionError, self).__init__(
            "Bad session id, consider re-handshaking")
4362
|
|
|
|
4363
|
|
|
|
4364
|
1 |
|
class _ScrobblerRequest(object):
    """
    A single HTTP request to the legacy audioscrobbler submission
    protocol.  Used by Scrobbler for handshakes, now-playing
    notifications and track submissions.
    """

    def __init__(self, url, params, network, request_type="POST"):

        # Stringify every value in place so it can be URL-encoded later.
        # NOTE(review): this mutates the caller's dict as well.
        for key in params:
            params[key] = str(params[key])

        self.params = params
        self.type = request_type
        # Strip the "http:" scheme prefix, then split the remainder into
        # hostname and request path.
        (self.hostname, self.subdir) = url_split_host(url[len("http:"):])
        self.network = network

    def execute(self):
        """Returns a string response of this request."""

        # Use HTTPS only when the running Python can verify certificates
        # sanely; otherwise fall back to plain HTTP.
        if _can_use_ssl_securely():
            connection = HTTPSConnection(
                context=SSL_CONTEXT,
                host=self.hostname
            )
        else:
            connection = HTTPConnection(
                host=self.hostname
            )

        # Build the application/x-www-form-urlencoded body from the
        # (already stringified) parameters.
        data = []
        for name in self.params.keys():
            value = url_quote_plus(self.params[name])
            data.append('='.join((name, value)))
        data = "&".join(data)

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept-Charset": "utf-8",
            "User-Agent": "pylast" + "/" + __version__,
            "HOST": self.hostname
        }

        if self.type == "GET":
            # GET carries the encoded parameters in the query string
            # instead of the body.
            connection.request(
                "GET", self.subdir + "?" + data, headers=headers)
        else:
            connection.request("POST", self.subdir, data, headers)
        response = _unicode(connection.getresponse().read())

        # Raises a ScrobblingError subclass if the server reported failure.
        self._check_response_for_errors(response)

        return response

    def _check_response_for_errors(self, response):
        """
        When passed a string response it checks for errors, raising any
        exceptions as necessary.
        """

        # The protocol reports its status on the first line of the body.
        lines = response.split("\n")
        status_line = lines[0]

        if status_line == "OK":
            return
        elif status_line == "BANNED":
            raise BannedClientError()
        elif status_line == "BADAUTH":
            raise BadAuthenticationError()
        elif status_line == "BADTIME":
            raise BadTimeError()
        elif status_line == "BADSESSION":
            raise BadSessionError()
        elif status_line.startswith("FAILED "):
            # "FAILED <reason>" — pass the reason through to the caller.
            reason = status_line[status_line.find("FAILED ") + len("FAILED "):]
            raise ScrobblingError(reason)
4435
|
|
|
|
4436
|
|
|
|
4437
|
1 |
|
class Scrobbler(object):
    """A class for scrobbling tracks to Last.fm using the deprecated
    audioscrobbler 1.2.1 submission protocol."""

    # Cached state from the last successful handshake; None until then.
    session_id = None
    nowplaying_url = None
    submissions_url = None

    def __init__(self, network, client_id, client_version):
        # client_id/client_version identify this client application in the
        # handshake ("c" and "v" parameters).
        self.client_id = client_id
        self.client_version = client_version
        self.username = network.username
        self.password = network.password_hash
        self.network = network

    def _do_handshake(self):
        """Handshakes with the server, caching the session id and the
        now-playing/submission URLs it returns."""

        timestamp = str(int(time.time()))

        # The auth token is an md5 of a secret concatenated with the
        # timestamp: either the user's password hash, or the API secret
        # when a session key is available.
        # NOTE(review): if neither branch matches, `token` is unbound and
        # the params dict below raises NameError — confirm callers always
        # configure one of the two auth methods.
        if self.password and self.username:
            token = md5(self.password + timestamp)
        elif self.network.api_key and self.network.api_secret and \
                self.network.session_key:
            if not self.username:
                self.username = self.network.get_authenticated_user()\
                    .get_name()
            token = md5(self.network.api_secret + timestamp)

        # Protocol 1.2.1 handshake parameters.
        params = {
            "hs": "true", "p": "1.2.1", "c": self.client_id,
            "v": self.client_version, "u": self.username, "t": timestamp,
            "a": token}

        if self.network.session_key and self.network.api_key:
            params["sk"] = self.network.session_key
            params["api_key"] = self.network.api_key

        server = self.network.submission_server
        response = _ScrobblerRequest(
            server, params, self.network, "GET").execute().split("\n")

        # Response lines: [0] status, [1] session id, [2] now-playing URL,
        # [3] submissions URL.
        self.session_id = response[1]
        self.nowplaying_url = response[2]
        self.submissions_url = response[3]

    def _get_session_id(self, new=False):
        """
        Returns a handshake. If new is true, then it will be requested from
        the server even if one was cached.
        """

        if not self.session_id or new:
            self._do_handshake()

        return self.session_id

    def report_now_playing(
            self, artist, title, album="", duration="", track_number="",
            mbid=""):
        """Notifies the server of the currently playing track.

        Deprecated: use Network.update_now_playing(...) instead.
        """

        _deprecation_warning(
            "DeprecationWarning: Use Network.update_now_playing(...) instead")

        params = {
            "s": self._get_session_id(), "a": artist, "t": title,
            "b": album, "l": duration, "n": track_number, "m": mbid}

        try:
            _ScrobblerRequest(
                self.nowplaying_url, params, self.network
            ).execute()
        except BadSessionError:
            # Stale session: re-handshake once and retry the report.
            self._do_handshake()
            self.report_now_playing(
                artist, title, album, duration, track_number, mbid)

    def scrobble(
            self, artist, title, time_started, source, mode, duration,
            album="", track_number="", mbid=""):
        """Scrobble a track. parameters:
            artist: Artist name.
            title: Track title.
            time_started: UTC timestamp of when the track started playing.
            source: The source of the track
                SCROBBLE_SOURCE_USER: Chosen by the user
                    (the most common value, unless you have a reason for
                    choosing otherwise, use this).
                SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST: Non-personalised
                    broadcast (e.g. Shoutcast, BBC Radio 1).
                SCROBBLE_SOURCE_PERSONALIZED_BROADCAST: Personalised
                    recommendation except Last.fm (e.g. Pandora, Launchcast).
                SCROBBLE_SOURCE_LASTFM: Last.fm (any mode). In this case, the
                    5-digit recommendation_key value must be set.
                SCROBBLE_SOURCE_UNKNOWN: Source unknown.
            mode: The submission mode
                SCROBBLE_MODE_PLAYED: The track was played.
                SCROBBLE_MODE_LOVED: The user manually loved the track
                    (implies a listen)
                SCROBBLE_MODE_SKIPPED: The track was skipped
                    (Only if source was Last.fm)
                SCROBBLE_MODE_BANNED: The track was banned
                    (Only if source was Last.fm)
            duration: Track duration in seconds.
            album: The album name.
            track_number: The track number on the album.
            mbid: MusicBrainz ID.

        Deprecated: use Network.scrobble(...) instead.
        """

        _deprecation_warning(
            "DeprecationWarning: Use Network.scrobble(...) instead")

        # Submission-protocol field names, indexed [0] for a single track.
        params = {
            "s": self._get_session_id(),
            "a[0]": _string(artist),
            "t[0]": _string(title),
            "i[0]": str(time_started),
            "o[0]": source,
            "r[0]": mode,
            "l[0]": str(duration),
            "b[0]": _string(album),
            "n[0]": track_number,
            "m[0]": mbid
        }

        _ScrobblerRequest(self.submissions_url, params, self.network).execute()

    def scrobble_many(self, tracks):
        """
        Scrobble several tracks at once.

        tracks: A sequence of a sequence of parameters for each track.
            The order of parameters is the same as if passed to the
            scrobble() method.

        Deprecated: use Network.scrobble_many(...) instead.
        """

        _deprecation_warning(
            "DeprecationWarning: Use Network.scrobble_many(...) instead")

        # The protocol accepts at most 50 tracks per request; anything
        # beyond that is submitted recursively afterwards.
        remainder = []

        if len(tracks) > 50:
            remainder = tracks[50:]
            tracks = tracks[:50]

        params = {"s": self._get_session_id()}

        i = 0
        for t in tracks:
            # Pad missing trailing parameters with "" so indexing below
            # is always safe.
            _pad_list(t, 9, "")
            params["a[%s]" % str(i)] = _string(t[0])
            params["t[%s]" % str(i)] = _string(t[1])
            params["i[%s]" % str(i)] = str(t[2])
            params["o[%s]" % str(i)] = t[3]
            params["r[%s]" % str(i)] = t[4]
            params["l[%s]" % str(i)] = str(t[5])
            params["b[%s]" % str(i)] = _string(t[6])
            params["n[%s]" % str(i)] = t[7]
            params["m[%s]" % str(i)] = t[8]

            i += 1

        _ScrobblerRequest(self.submissions_url, params, self.network).execute()

        if remainder:
            self.scrobble_many(remainder)
4602
|
|
|
|
4603
|
|
|
# End of file |
4604
|
|
|
|