1
|
|
|
# -*- coding: utf-8 -*- |
2
|
|
|
# |
3
|
|
|
# pylast - |
4
|
|
|
# A Python interface to Last.fm and Libre.fm |
5
|
|
|
# |
6
|
|
|
# Copyright 2008-2010 Amr Hassan |
7
|
|
|
# Copyright 2013-2015 hugovk |
8
|
|
|
# |
9
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
10
|
|
|
# you may not use this file except in compliance with the License. |
11
|
|
|
# You may obtain a copy of the License at |
12
|
|
|
# |
13
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
14
|
|
|
# |
15
|
|
|
# Unless required by applicable law or agreed to in writing, software |
16
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
17
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
18
|
|
|
# See the License for the specific language governing permissions and |
19
|
|
|
# limitations under the License. |
20
|
|
|
# |
21
|
|
|
# https://github.com/pylast/pylast |
22
|
|
|
|
23
|
1 |
|
import hashlib |
24
|
1 |
|
from xml.dom import minidom, Node |
25
|
1 |
|
import xml.dom |
26
|
1 |
|
import time |
27
|
1 |
|
import shelve |
28
|
1 |
|
import tempfile |
29
|
1 |
|
import sys |
30
|
1 |
|
import collections |
31
|
1 |
|
import warnings |
32
|
1 |
|
import re |
33
|
1 |
|
import six |
34
|
|
|
|
35
|
1 |
|
# Package metadata, kept at module level so packaging and introspection
# tools (and users) can read them directly from the imported module.
__version__ = '1.4.2'
__author__ = 'Amr Hassan, hugovk'
__copyright__ = "Copyright (C) 2008-2010 Amr Hassan, 2013-2015 hugovk"
__license__ = "apache2"
__email__ = '[email protected]'
40
|
|
|
|
41
|
|
|
|
42
|
1 |
|
def _deprecation_warning(message):
    """Emit a DeprecationWarning with the given message.

    NOTE(review): no ``stacklevel`` is passed, so the warning is reported
    against this helper rather than the deprecated call site -- consider
    confirming whether that is intended.
    """
    warnings.warn(message, DeprecationWarning)
44
|
|
|
|
45
|
1 |
|
# Python 2/3 compatibility shims: bind the relocated stdlib names under
# their Python 2 spellings so the rest of the module stays version-agnostic.
if sys.version_info[0] == 3:
    from http.client import HTTPConnection
    import html.entities as htmlentitydefs
    # NOTE(review): urllib.parse.splithost was undocumented and removed in
    # Python 3.8 -- confirm which Python 3 versions are supported.
    from urllib.parse import splithost as url_split_host
    from urllib.parse import quote_plus as url_quote_plus

    # Python 3 has no unichr(); chr() covers the full Unicode range.
    unichr = chr

elif sys.version_info[0] == 2:
    from httplib import HTTPConnection
    import htmlentitydefs
    from urllib import splithost as url_split_host
    from urllib import quote_plus as url_quote_plus
59
|
1 |
|
# Error status codes returned by the web service when a request fails
# (see http://www.last.fm/api/errorcodes).
STATUS_INVALID_SERVICE = 2
STATUS_INVALID_METHOD = 3
STATUS_AUTH_FAILED = 4
STATUS_INVALID_FORMAT = 5
STATUS_INVALID_PARAMS = 6
STATUS_INVALID_RESOURCE = 7
STATUS_TOKEN_ERROR = 8
STATUS_INVALID_SK = 9
STATUS_INVALID_API_KEY = 10
STATUS_OFFLINE = 11
STATUS_SUBSCRIBERS_ONLY = 12
STATUS_INVALID_SIGNATURE = 13
STATUS_TOKEN_UNAUTHORIZED = 14
STATUS_TOKEN_EXPIRED = 15

# Event attendance status values (sent as strings to the web service).
EVENT_ATTENDING = '0'
EVENT_MAYBE_ATTENDING = '1'
EVENT_NOT_ATTENDING = '2'

# Chart period identifiers accepted by the user.getTop* style methods.
PERIOD_OVERALL = 'overall'
PERIOD_7DAYS = "7day"
PERIOD_3MONTHS = '3month'
PERIOD_6MONTHS = '6month'
PERIOD_12MONTHS = '12month'

# Keys into a network's domain_names dict: one website domain per language.
DOMAIN_ENGLISH = 0
DOMAIN_GERMAN = 1
DOMAIN_SPANISH = 2
DOMAIN_FRENCH = 3
DOMAIN_ITALIAN = 4
DOMAIN_POLISH = 5
DOMAIN_PORTUGUESE = 6
DOMAIN_SWEDISH = 7
DOMAIN_TURKISH = 8
DOMAIN_RUSSIAN = 9
DOMAIN_JAPANESE = 10
DOMAIN_CHINESE = 11

# Cover-art size indices, ordered smallest to largest.
COVER_SMALL = 0
COVER_MEDIUM = 1
COVER_LARGE = 2
COVER_EXTRA_LARGE = 3
COVER_MEGA = 4

# Sort orders for image lists.
IMAGES_ORDER_POPULARITY = "popularity"
IMAGES_ORDER_DATE = "dateadded"


# User gender values.
USER_MALE = 'Male'
USER_FEMALE = 'Female'

# Scrobble "source" codes from the submissions protocol.
SCROBBLE_SOURCE_USER = "P"
SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST = "R"
SCROBBLE_SOURCE_PERSONALIZED_BROADCAST = "E"
SCROBBLE_SOURCE_LASTFM = "L"
SCROBBLE_SOURCE_UNKNOWN = "U"

# Scrobble "rating"/mode codes from the submissions protocol.
SCROBBLE_MODE_PLAYED = ""
SCROBBLE_MODE_LOVED = "L"
SCROBBLE_MODE_BANNED = "B"
SCROBBLE_MODE_SKIPPED = "S"

# From http://boodebr.org/main/python/all-about-python-and-unicode#UNI_XML
# Pattern matching characters illegal in XML 1.0: low control characters,
# \ufffe-\uffff, and unpaired UTF-16 surrogates.
RE_XML_ILLEGAL = (u'([\u0000-\u0008\u000b-\u000c\u000e-\u001f\ufffe-\uffff])' +
                  u'|' +
                  u'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])'
                  %
                  (unichr(0xd800), unichr(0xdbff), unichr(0xdc00),
                   unichr(0xdfff), unichr(0xd800), unichr(0xdbff),
                   unichr(0xdc00), unichr(0xdfff), unichr(0xd800),
                   unichr(0xdbff), unichr(0xdc00), unichr(0xdfff)))

XML_ILLEGAL = re.compile(RE_XML_ILLEGAL)
132
|
|
|
|
133
|
|
|
|
134
|
1 |
|
class _Network(object):
    """
    A music social network website such as Last.fm or
    one with a Last.fm-compatible API.

    Instances are normally created through the get_*_network(...) helper
    functions rather than directly.
    """
139
|
|
|
|
140
|
1 |
|
    def __init__(
            self, name, homepage, ws_server, api_key, api_secret, session_key,
            submission_server, username, password_hash, domain_names, urls):
        """
        name: the name of the network
        homepage: the homepage URL
        ws_server: the URL of the webservices server
        api_key: a provided API_KEY
        api_secret: a provided API_SECRET
        session_key: a generated session_key or None
        submission_server: the URL of the server to which tracks are
            submitted (scrobbled)
        username: a username of a valid user
        password_hash: the output of pylast.md5(password) where password is
            the user's password
        domain_names: a dict mapping each DOMAIN_* value to a string domain
            name
        urls: a dict mapping types to URLs

        if username and password_hash were provided and not session_key,
        session_key will be generated automatically when needed.

        Either a valid session_key or a combination of username and
        password_hash must be present for scrobbling.

        You should use a preconfigured network object through a
        get_*_network(...) method instead of creating an object
        of this class, unless you know what you're doing.
        """

        self.name = name
        self.homepage = homepage
        self.ws_server = ws_server
        self.api_key = api_key
        self.api_secret = api_secret
        self.session_key = session_key
        self.submission_server = submission_server
        self.username = username
        self.password_hash = password_hash
        self.domain_names = domain_names
        self.urls = urls

        # Per-instance state for caching, proxying and rate limiting;
        # everything starts disabled/empty.
        self.cache_backend = None
        self.proxy_enabled = False
        self.proxy = None
        self.last_call_time = 0
        self.limit_rate = False

        # Generate a session_key if necessary
        if ((self.api_key and self.api_secret) and not self.session_key and
                (self.username and self.password_hash)):
            sk_gen = SessionKeyGenerator(self)
            self.session_key = sk_gen.get_session_key(
                self.username, self.password_hash)
194
|
|
|
|
195
|
1 |
|
def __str__(self): |
196
|
|
|
return "%s Network" % self.name |
197
|
|
|
|
198
|
1 |
|
    def get_artist(self, artist_name):
        """
        Return an Artist object for the given artist name.
        """

        return Artist(artist_name, self)
204
|
|
|
|
205
|
1 |
|
    def get_track(self, artist, title):
        """
        Return a Track object for the given artist name and track title.
        """

        return Track(artist, title, self)
211
|
|
|
|
212
|
1 |
|
    def get_album(self, artist, title):
        """
        Return an Album object for the given artist name and album title.
        """

        return Album(artist, title, self)
218
|
|
|
|
219
|
1 |
|
    def get_authenticated_user(self):
        """
        Returns the authenticated user (based on this network's session).
        """

        return AuthenticatedUser(self)
225
|
|
|
|
226
|
1 |
|
    def get_country(self, country_name):
        """
        Returns a Country object for the given country name.
        """

        return Country(country_name, self)
232
|
|
|
|
233
|
1 |
|
    def get_metro(self, metro_name, country_name):
        """
        Returns a Metro object for the given metro and country names.
        """

        return Metro(metro_name, country_name, self)
239
|
|
|
|
240
|
1 |
|
    def get_group(self, name):
        """
        Returns a Group object for the given group name.
        """

        return Group(name, self)
246
|
|
|
|
247
|
1 |
|
    def get_user(self, username):
        """
        Returns a User object for the given username.
        """

        return User(username, self)
253
|
|
|
|
254
|
1 |
|
    def get_tag(self, name):
        """
        Returns a Tag object for the given tag name.
        """

        return Tag(name, self)
260
|
|
|
|
261
|
1 |
|
    def get_scrobbler(self, client_id, client_version):
        """
        Returns a Scrobbler object used for submitting tracks to the server

        Deprecated: prefer scrobble()/scrobble_many()/update_now_playing().

        Quote from http://www.last.fm/api/submissions:
        ========
        Client identifiers are used to provide a centrally managed database
        of the client versions, allowing clients to be banned if they are
        found to be behaving undesirably. The client ID is associated with
        a version number on the server, however these are only incremented
        if a client is banned and do not have to reflect the version of the
        actual client application.

        During development, clients which have not been allocated an
        identifier should use the identifier tst, with a version number of
        1.0. Do not distribute code or client implementations which use
        this test identifier. Do not use the identifiers used by other
        clients.
        =========

        To obtain a new client identifier please contact:
            * Last.fm: [email protected]
            * # TODO: list others

        ...and provide us with the name of your client and its homepage
        address.
        """

        # Warn but still return a working Scrobbler for backward
        # compatibility.
        _deprecation_warning(
            "Use _Network.scrobble(...), _Network.scrobble_many(...),"
            " and Network.update_now_playing(...) instead")

        return Scrobbler(self, client_id, client_version)
294
|
|
|
|
295
|
1 |
|
def _get_language_domain(self, domain_language): |
296
|
|
|
""" |
297
|
|
|
Returns the mapped domain name of the network to a DOMAIN_* value |
298
|
|
|
""" |
299
|
|
|
|
300
|
|
|
if domain_language in self.domain_names: |
301
|
|
|
return self.domain_names[domain_language] |
302
|
|
|
|
303
|
1 |
|
def _get_url(self, domain, url_type): |
304
|
|
|
return "http://%s/%s" % ( |
305
|
|
|
self._get_language_domain(domain), self.urls[url_type]) |
306
|
|
|
|
307
|
1 |
|
def _get_ws_auth(self): |
308
|
|
|
""" |
309
|
|
|
Returns an (API_KEY, API_SECRET, SESSION_KEY) tuple. |
310
|
|
|
""" |
311
|
|
|
return (self.api_key, self.api_secret, self.session_key) |
312
|
|
|
|
313
|
1 |
|
def _delay_call(self): |
314
|
|
|
""" |
315
|
|
|
Makes sure that web service calls are at least 0.2 seconds apart. |
316
|
|
|
""" |
317
|
|
|
|
318
|
|
|
# Delay time in seconds from section 4.4 of http://www.last.fm/api/tos |
319
|
|
|
DELAY_TIME = 0.2 |
320
|
|
|
now = time.time() |
321
|
|
|
|
322
|
|
|
time_since_last = now - self.last_call_time |
323
|
|
|
|
324
|
|
|
if time_since_last < DELAY_TIME: |
325
|
|
|
time.sleep(DELAY_TIME - time_since_last) |
326
|
|
|
|
327
|
|
|
self.last_call_time = now |
328
|
|
|
|
329
|
1 |
|
    def create_new_playlist(self, title, description):
        """
        Creates a playlist for the authenticated user and returns it
        title: The title of the new playlist.
        description: The description of the new playlist.
        """

        params = {}
        params['title'] = title
        params['description'] = description

        # Authenticated request; not cacheable since it mutates state.
        doc = _Request(self, 'playlist.create', params).execute(False)

        # The response carries the new playlist id and the owning user.
        e_id = doc.getElementsByTagName("id")[0].firstChild.data
        user = doc.getElementsByTagName('playlists')[0].getAttribute('user')

        return Playlist(user, e_id, self)
346
|
|
|
|
347
|
1 |
|
    def get_top_artists(self, limit=None, cacheable=True):
        """Returns the most played artists as a sequence of TopItem objects.

        limit: optional maximum number of results.
        cacheable: whether the response may be served from/stored in cache.
        """

        params = {}
        if limit:
            params["limit"] = limit

        doc = _Request(self, "chart.getTopArtists", params).execute(cacheable)

        return _extract_top_artists(doc, self)
357
|
|
|
|
358
|
1 |
|
    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns the most played tracks as a sequence of TopItem objects.

        limit: optional maximum number of results.
        cacheable: whether the response may be served from/stored in cache.
        """

        params = {}
        if limit:
            params["limit"] = limit

        doc = _Request(self, "chart.getTopTracks", params).execute(cacheable)

        seq = []
        for node in doc.getElementsByTagName("track"):
            # First <name> is the track title, second is the artist name.
            title = _extract(node, "name")
            artist = _extract(node, "name", 1)
            track = Track(artist, title, self)
            weight = _number(_extract(node, "playcount"))
            seq.append(TopItem(track, weight))

        return seq
376
|
|
|
|
377
|
1 |
|
    def get_top_tags(self, limit=None, cacheable=True):
        """Returns the most used tags as a sequence of TopItem objects.

        limit: optional maximum number of results (applied locally).
        cacheable: whether the response may be served from/stored in cache.
        """

        # Last.fm has no "limit" parameter for tag.getTopTags
        # so we need to get all (250) and then limit locally
        doc = _Request(self, "tag.getTopTags").execute(cacheable)

        seq = []
        for node in doc.getElementsByTagName("tag"):
            if limit and len(seq) >= limit:
                break
            tag = Tag(_extract(node, "name"), self)
            weight = _number(_extract(node, "count"))
            seq.append(TopItem(tag, weight))

        return seq
393
|
|
|
|
394
|
1 |
|
def get_geo_events( |
395
|
|
|
self, longitude=None, latitude=None, location=None, distance=None, |
396
|
|
|
tag=None, festivalsonly=None, limit=None, cacheable=True): |
397
|
|
|
""" |
398
|
|
|
Returns all events in a specific location by country or city name. |
399
|
|
|
Parameters: |
400
|
|
|
longitude (Optional) : Specifies a longitude value to retrieve events |
401
|
|
|
for (service returns nearby events by default) |
402
|
|
|
latitude (Optional) : Specifies a latitude value to retrieve events for |
403
|
|
|
(service returns nearby events by default) |
404
|
|
|
location (Optional) : Specifies a location to retrieve events for |
405
|
|
|
(service returns nearby events by default) |
406
|
|
|
distance (Optional) : Find events within a specified radius |
407
|
|
|
(in kilometres) |
408
|
|
|
tag (Optional) : Specifies a tag to filter by. |
409
|
|
|
festivalsonly[0|1] (Optional) : Whether only festivals should be |
410
|
|
|
returned, or all events. |
411
|
|
|
limit (Optional) : The number of results to fetch per page. |
412
|
|
|
Defaults to 10. |
413
|
|
|
""" |
414
|
|
|
|
415
|
|
|
params = {} |
416
|
|
|
|
417
|
|
|
if longitude: |
418
|
|
|
params["long"] = longitude |
419
|
|
|
if latitude: |
420
|
|
|
params["lat"] = latitude |
421
|
|
|
if location: |
422
|
|
|
params["location"] = location |
423
|
|
|
if limit: |
424
|
|
|
params["limit"] = limit |
425
|
|
|
if distance: |
426
|
|
|
params["distance"] = distance |
427
|
|
|
if tag: |
428
|
|
|
params["tag"] = tag |
429
|
|
|
if festivalsonly: |
430
|
|
|
params["festivalsonly"] = 1 |
431
|
|
|
elif not festivalsonly: |
432
|
|
|
params["festivalsonly"] = 0 |
433
|
|
|
|
434
|
|
|
doc = _Request(self, "geo.getEvents", params).execute(cacheable) |
435
|
|
|
|
436
|
|
|
return _extract_events_from_doc(doc, self) |
437
|
|
|
|
438
|
1 |
|
    def get_metro_weekly_chart_dates(self, cacheable=True):
        """
        Returns a list of (from, to) timestamp-string tuples for the
        available metro charts.
        """

        doc = _Request(self, "geo.getMetroWeeklyChartlist").execute(cacheable)

        seq = []
        for node in doc.getElementsByTagName("chart"):
            seq.append((node.getAttribute("from"), node.getAttribute("to")))

        return seq
450
|
|
|
|
451
|
1 |
|
def get_metros(self, country=None, cacheable=True): |
452
|
|
|
""" |
453
|
|
|
Get a list of valid countries and metros for use in the other |
454
|
|
|
webservices. |
455
|
|
|
Parameters: |
456
|
|
|
country (Optional) : Optionally restrict the results to those Metros |
457
|
|
|
from a particular country, as defined by the ISO 3166-1 country |
458
|
|
|
names standard. |
459
|
|
|
""" |
460
|
|
|
params = {} |
461
|
|
|
|
462
|
|
|
if country: |
463
|
|
|
params["country"] = country |
464
|
|
|
|
465
|
|
|
doc = _Request(self, "geo.getMetros", params).execute(cacheable) |
466
|
|
|
|
467
|
|
|
metros = doc.getElementsByTagName("metro") |
468
|
|
|
seq = [] |
469
|
|
|
|
470
|
|
|
for metro in metros: |
471
|
|
|
name = _extract(metro, "name") |
472
|
|
|
country = _extract(metro, "country") |
473
|
|
|
|
474
|
|
|
seq.append(Metro(name, country, self)) |
475
|
|
|
|
476
|
|
|
return seq |
477
|
|
|
|
478
|
1 |
|
    def get_geo_top_artists(self, country, limit=None, cacheable=True):
        """Get the most popular artists on Last.fm by country.

        Parameters:
        country (Required) : A country name, as defined by the ISO 3166-1
            country names standard.
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        params = {"country": country}

        if limit:
            params["limit"] = limit

        doc = _Request(self, "geo.getTopArtists", params).execute(cacheable)

        return _extract_top_artists(doc, self)
494
|
|
|
|
495
|
1 |
|
    def get_geo_top_tracks(
            self, country, location=None, limit=None, cacheable=True):
        """Get the most popular tracks on Last.fm last week by country.

        Parameters:
        country (Required) : A country name, as defined by the ISO 3166-1
            country names standard
        location (Optional) : A metro name, to fetch the charts for
            (must be within the country specified)
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        params = {"country": country}

        if location:
            params["location"] = location
        if limit:
            params["limit"] = limit

        doc = _Request(self, "geo.getTopTracks", params).execute(cacheable)

        tracks = doc.getElementsByTagName("track")
        seq = []

        for track in tracks:
            # First <name> is the track title, second is the artist name.
            title = _extract(track, "name")
            artist = _extract(track, "name", 1)
            listeners = _extract(track, "listeners")

            seq.append(TopItem(Track(artist, title, self), listeners))

        return seq
526
|
|
|
|
527
|
1 |
|
    def enable_proxy(self, host, port):
        """Enable a default web proxy.

        host: proxy host name or address.
        port: proxy port (converted to a number).
        """

        self.proxy = [host, _number(port)]
        self.proxy_enabled = True
532
|
|
|
|
533
|
1 |
|
    def disable_proxy(self):
        """Disable using the web proxy (proxy details are kept)."""

        self.proxy_enabled = False
537
|
|
|
|
538
|
1 |
|
    def is_proxy_enabled(self):
        """Returns True if a web proxy is enabled."""

        return self.proxy_enabled
542
|
|
|
|
543
|
1 |
|
    def _get_proxy(self):
        """Returns proxy details as a [host, port] list (or None)."""

        return self.proxy
547
|
|
|
|
548
|
1 |
|
    def enable_rate_limit(self):
        """Enables rate limiting for this network (see _delay_call)."""
        self.limit_rate = True
551
|
|
|
|
552
|
1 |
|
    def disable_rate_limit(self):
        """Disables rate limiting for this network"""
        self.limit_rate = False
555
|
|
|
|
556
|
1 |
|
    def is_rate_limited(self):
        """Return True if web service calls are rate limited"""
        return self.limit_rate
559
|
|
|
|
560
|
1 |
|
    def enable_caching(self, file_path=None):
        """Enables caching request-wide for all cacheable calls.

        * file_path: A file path for the backend storage file. If None,
        a temporary file path is generated for the backend.
        """

        if not file_path:
            # NOTE(review): tempfile.mktemp only reserves a name and is
            # race-prone/deprecated; the file is created when the shelf
            # backend opens it -- confirm this is acceptable here.
            file_path = tempfile.mktemp(prefix="pylast_tmp_")

        self.cache_backend = _ShelfCacheBackend(file_path)
571
|
|
|
|
572
|
1 |
|
    def disable_caching(self):
        """Disables all caching features."""

        self.cache_backend = None
576
|
|
|
|
577
|
1 |
|
def is_caching_enabled(self): |
578
|
|
|
"""Returns True if caching is enabled.""" |
579
|
|
|
|
580
|
|
|
return not (self.cache_backend is None) |
581
|
|
|
|
582
|
1 |
|
    def _get_cache_backend(self):
        """Return the current cache backend (None when caching is off)."""

        return self.cache_backend
585
|
|
|
|
586
|
1 |
|
    def search_for_album(self, album_name):
        """Searches for an album by its name. Returns a AlbumSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return AlbumSearch(album_name, self)
591
|
|
|
|
592
|
1 |
|
    def search_for_artist(self, artist_name):
        """Searches for an artist by its name. Returns a ArtistSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return ArtistSearch(artist_name, self)
597
|
|
|
|
598
|
1 |
|
    def search_for_tag(self, tag_name):
        """Searches for a tag by its name. Returns a TagSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return TagSearch(tag_name, self)
603
|
|
|
|
604
|
1 |
|
    def search_for_track(self, artist_name, track_name):
        """Searches for a track by its name and its artist. Set artist to an
        empty string if not available.
        Returns a TrackSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return TrackSearch(artist_name, track_name, self)
611
|
|
|
|
612
|
1 |
|
    def search_for_venue(self, venue_name, country_name):
        """Searches for a venue by its name and its country. Set country_name
        to an empty string if not available.
        Returns a VenueSearch object.
        Use get_next_page() to retrieve sequences of results."""

        return VenueSearch(venue_name, country_name, self)
619
|
|
|
|
620
|
1 |
|
    def get_track_by_mbid(self, mbid):
        """Looks up a track by its MusicBrainz ID"""

        params = {"mbid": mbid}

        doc = _Request(self, "track.getInfo", params).execute(True)

        # Second <name> in the response is the artist, first is the title.
        return Track(_extract(doc, "name", 1), _extract(doc, "name"), self)
628
|
|
|
|
629
|
1 |
|
    def get_artist_by_mbid(self, mbid):
        """Looks up an artist by its MusicBrainz ID"""

        params = {"mbid": mbid}

        doc = _Request(self, "artist.getInfo", params).execute(True)

        return Artist(_extract(doc, "name"), self)
637
|
|
|
|
638
|
1 |
|
    def get_album_by_mbid(self, mbid):
        """Looks up an album by its MusicBrainz ID"""

        params = {"mbid": mbid}

        doc = _Request(self, "album.getInfo", params).execute(True)

        return Album(_extract(doc, "artist"), _extract(doc, "name"), self)
646
|
|
|
|
647
|
1 |
|
    def update_now_playing(
            self, artist, title, album=None, album_artist=None,
            duration=None, track_number=None, mbid=None, context=None):
        """
        Used to notify Last.fm that a user has started listening to a track.

        Parameters:
            artist (Required) : The artist name
            title (Required) : The track title
            album (Optional) : The album name.
            album_artist (Optional) : The album artist - if this differs
                from the track artist.
            duration (Optional) : The length of the track in seconds.
            track_number (Optional) : The track number of the track on the
                album.
            mbid (Optional) : The MusicBrainz Track ID.
            context (Optional) : Sub-client version
                (not public, only enabled for certain API keys)
        """

        params = {"track": title, "artist": artist}

        # Only include optional parameters the caller actually supplied.
        if album:
            params["album"] = album
        if album_artist:
            params["albumArtist"] = album_artist
        if context:
            params["context"] = context
        if track_number:
            params["trackNumber"] = track_number
        if mbid:
            params["mbid"] = mbid
        if duration:
            params["duration"] = duration

        _Request(self, "track.updateNowPlaying", params).execute()
683
|
|
|
|
684
|
1 |
|
    def scrobble(
            self, artist, title, timestamp, album=None, album_artist=None,
            track_number=None, duration=None, stream_id=None, context=None,
            mbid=None):

        """Used to add a track-play to a user's profile.

        Parameters:
            artist (Required) : The artist name.
            title (Required) : The track name.
            timestamp (Required) : The time the track started playing, in UNIX
                timestamp format (integer number of seconds since 00:00:00,
                January 1st 1970 UTC). This must be in the UTC time zone.
            album (Optional) : The album name.
            album_artist (Optional) : The album artist - if this differs from
                the track artist.
            context (Optional) : Sub-client version (not public, only enabled
                for certain API keys)
            stream_id (Optional) : The stream id for this track received from
                the radio.getPlaylist service.
            track_number (Optional) : The track number of the track on the
                album.
            mbid (Optional) : The MusicBrainz Track ID.
            duration (Optional) : The length of the track in seconds.
        """

        # Delegate to scrobble_many with a single-entry tuple of track dicts.
        return self.scrobble_many(({
            "artist": artist, "title": title, "timestamp": timestamp,
            "album": album, "album_artist": album_artist,
            "track_number": track_number, "duration": duration,
            "stream_id": stream_id, "context": context, "mbid": mbid},))
715
|
|
|
|
716
|
1 |
|
    def scrobble_many(self, tracks):
        """
        Used to scrobble a batch of tracks at once. The parameter tracks is a
        sequence of dicts per track containing the keyword arguments as if
        passed to the scrobble() method.

        Batches are submitted 50 tracks at a time; any remainder is handled
        by a recursive call.
        """

        # The web service accepts at most 50 scrobbles per request.
        tracks_to_scrobble = tracks[:50]
        if len(tracks) > 50:
            remaining_tracks = tracks[50:]
        else:
            remaining_tracks = None

        params = {}
        for i in range(len(tracks_to_scrobble)):

            params["artist[%d]" % i] = tracks_to_scrobble[i]["artist"]
            params["track[%d]" % i] = tracks_to_scrobble[i]["title"]

            additional_args = (
                "timestamp", "album", "album_artist", "context",
                "stream_id", "track_number", "mbid", "duration")
            # Map our snake_case keyword names onto the web service's
            # camelCase parameter names where they differ.
            args_map_to = {  # so friggin lazy
                "album_artist": "albumArtist",
                "track_number": "trackNumber",
                "stream_id": "streamID"}

            for arg in additional_args:

                # Only send optional values that are present and truthy.
                if arg in tracks_to_scrobble[i] and tracks_to_scrobble[i][arg]:
                    if arg in args_map_to:
                        maps_to = args_map_to[arg]
                    else:
                        maps_to = arg

                    params[
                        "%s[%d]" % (maps_to, i)] = tracks_to_scrobble[i][arg]

        _Request(self, "track.scrobble", params).execute()

        # Recurse for any tracks beyond the 50-per-request limit.
        if remaining_tracks:
            self.scrobble_many(remaining_tracks)
758
|
|
|
|
759
|
1 |
|
    def get_play_links(self, link_type, things, cacheable=True):
        """Return a list of Spotify play-link IDs for the given things.

        link_type: one of "artist", "album" or "track".
        things: for "artist", a sequence of artist names; for "album"/"track",
            a sequence of objects with .artist and .title attributes.
        """
        method = link_type + ".getPlaylinks"
        params = {}

        for i, thing in enumerate(things):
            if link_type == "artist":
                params['artist[' + str(i) + ']'] = thing
            elif link_type == "album":
                params['artist[' + str(i) + ']'] = thing.artist
                params['album[' + str(i) + ']'] = thing.title
            elif link_type == "track":
                params['artist[' + str(i) + ']'] = thing.artist
                params['track[' + str(i) + ']'] = thing.title

        doc = _Request(self, method, params).execute(cacheable)

        seq = []

        for node in doc.getElementsByTagName("externalids"):
            spotify = _extract(node, "spotify")
            seq.append(spotify)

        return seq
782
|
|
|
|
783
|
1 |
|
    def get_artist_play_links(self, artists, cacheable=True):
        """Return Spotify play links for a sequence of artist names."""
        return self.get_play_links("artist", artists, cacheable)
785
|
|
|
|
786
|
1 |
|
    def get_album_play_links(self, albums, cacheable=True):
        """Return Spotify play links for a sequence of album objects."""
        return self.get_play_links("album", albums, cacheable)
788
|
|
|
|
789
|
1 |
|
    def get_track_play_links(self, tracks, cacheable=True):
        """Return Spotify play links for a sequence of track objects."""
        return self.get_play_links("track", tracks, cacheable)
791
|
|
|
|
792
|
|
|
|
793
|
1 |
|
class LastFMNetwork(_Network):

    """A Last.fm network object

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key or a combination of username and password_hash
    must be present for scrobbling.

    Most read-only webservices only require an api_key and an api_secret, see
    about obtaining them from:
    http://www.last.fm/api/account
    """
814
|
|
|
|
815
|
1 |
|
def __init__( |
816
|
|
|
self, api_key="", api_secret="", session_key="", username="", |
817
|
|
|
password_hash=""): |
818
|
|
|
_Network.__init__( |
819
|
|
|
self, |
820
|
|
|
name="Last.fm", |
821
|
|
|
homepage="http://last.fm", |
822
|
|
|
ws_server=("ws.audioscrobbler.com", "/2.0/"), |
823
|
|
|
api_key=api_key, |
824
|
|
|
api_secret=api_secret, |
825
|
|
|
session_key=session_key, |
826
|
|
|
submission_server="http://post.audioscrobbler.com:80/", |
827
|
|
|
username=username, |
828
|
|
|
password_hash=password_hash, |
829
|
|
|
domain_names={ |
830
|
|
|
DOMAIN_ENGLISH: 'www.last.fm', |
831
|
|
|
DOMAIN_GERMAN: 'www.lastfm.de', |
832
|
|
|
DOMAIN_SPANISH: 'www.lastfm.es', |
833
|
|
|
DOMAIN_FRENCH: 'www.lastfm.fr', |
834
|
|
|
DOMAIN_ITALIAN: 'www.lastfm.it', |
835
|
|
|
DOMAIN_POLISH: 'www.lastfm.pl', |
836
|
|
|
DOMAIN_PORTUGUESE: 'www.lastfm.com.br', |
837
|
|
|
DOMAIN_SWEDISH: 'www.lastfm.se', |
838
|
|
|
DOMAIN_TURKISH: 'www.lastfm.com.tr', |
839
|
|
|
DOMAIN_RUSSIAN: 'www.lastfm.ru', |
840
|
|
|
DOMAIN_JAPANESE: 'www.lastfm.jp', |
841
|
|
|
DOMAIN_CHINESE: 'cn.last.fm', |
842
|
|
|
}, |
843
|
|
|
urls={ |
844
|
|
|
"album": "music/%(artist)s/%(album)s", |
845
|
|
|
"artist": "music/%(artist)s", |
846
|
|
|
"event": "event/%(id)s", |
847
|
|
|
"country": "place/%(country_name)s", |
848
|
|
|
"playlist": "user/%(user)s/library/playlists/%(appendix)s", |
849
|
|
|
"tag": "tag/%(name)s", |
850
|
|
|
"track": "music/%(artist)s/_/%(title)s", |
851
|
|
|
"group": "group/%(name)s", |
852
|
|
|
"user": "user/%(name)s", |
853
|
|
|
} |
854
|
|
|
) |
855
|
|
|
|
856
|
1 |
|
def __repr__(self): |
857
|
|
|
return "pylast.LastFMNetwork(%s)" % (", ".join( |
858
|
|
|
("'%s'" % self.api_key, |
859
|
|
|
"'%s'" % self.api_secret, |
860
|
|
|
"'%s'" % self.session_key, |
861
|
|
|
"'%s'" % self.username, |
862
|
|
|
"'%s'" % self.password_hash))) |
863
|
|
|
|
864
|
|
|
|
865
|
1 |
|
def get_lastfm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Returns a preconfigured _Network object for Last.fm

    Deprecated: construct a LastFMNetwork directly instead; this factory
    only forwards its arguments after emitting a DeprecationWarning.

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.

    Either a valid session_key or a combination of username and password_hash
    must be present for scrobbling.

    Most read-only webservices only require an api_key and an api_secret, see
    about obtaining them from:
    http://www.last.fm/api/account
    """
    _deprecation_warning("Create a LastFMNetwork object instead")

    network = LastFMNetwork(
        api_key, api_secret, session_key, username, password_hash)
    return network
893
|
|
|
|
894
|
|
|
|
895
|
1 |
|
class LibreFMNetwork(_Network):

    """
    A preconfigured _Network object for Libre.fm

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """

    def __init__(
            self, api_key="", api_secret="", session_key="", username="",
            password_hash=""):
        # Libre.fm mirrors the Last.fm 2.0 API; only the hosts differ.
        _Network.__init__(
            self,
            name="Libre.fm",
            homepage="http://alpha.libre.fm",
            ws_server=("alpha.libre.fm", "/2.0/"),
            api_key=api_key,
            api_secret=api_secret,
            session_key=session_key,
            submission_server="http://turtle.libre.fm:80/",
            username=username,
            password_hash=password_hash,
            # Libre.fm serves one host for every language.
            domain_names={
                DOMAIN_ENGLISH: "alpha.libre.fm",
                DOMAIN_GERMAN: "alpha.libre.fm",
                DOMAIN_SPANISH: "alpha.libre.fm",
                DOMAIN_FRENCH: "alpha.libre.fm",
                DOMAIN_ITALIAN: "alpha.libre.fm",
                DOMAIN_POLISH: "alpha.libre.fm",
                DOMAIN_PORTUGUESE: "alpha.libre.fm",
                DOMAIN_SWEDISH: "alpha.libre.fm",
                DOMAIN_TURKISH: "alpha.libre.fm",
                DOMAIN_RUSSIAN: "alpha.libre.fm",
                DOMAIN_JAPANESE: "alpha.libre.fm",
                DOMAIN_CHINESE: "alpha.libre.fm",
            },
            # %-style templates for building website URLs per entity type.
            urls={
                "album": "artist/%(artist)s/album/%(album)s",
                "artist": "artist/%(artist)s",
                "event": "event/%(id)s",
                "country": "place/%(country_name)s",
                "playlist": "user/%(user)s/library/playlists/%(appendix)s",
                "tag": "tag/%(name)s",
                "track": "music/%(artist)s/_/%(title)s",
                "group": "group/%(name)s",
                "user": "user/%(name)s",
            }
        )

    def __repr__(self):
        # Reproduces a constructor-like form with the credentials in
        # positional order.
        return "pylast.LibreFMNetwork(%s)" % (", ".join(
            ("'%s'" % self.api_key,
             "'%s'" % self.api_secret,
             "'%s'" % self.session_key,
             "'%s'" % self.username,
             "'%s'" % self.password_hash)))
959
|
|
|
|
960
|
|
|
|
961
|
1 |
|
def get_librefm_network(
        api_key="", api_secret="", session_key="", username="",
        password_hash=""):
    """
    Returns a preconfigured _Network object for Libre.fm

    Deprecated: construct a LibreFMNetwork directly instead; this factory
    only forwards its arguments after emitting a DeprecationWarning.

    api_key: a provided API_KEY
    api_secret: a provided API_SECRET
    session_key: a generated session_key or None
    username: a username of a valid user
    password_hash: the output of pylast.md5(password) where password is the
        user's password

    if username and password_hash were provided and not session_key,
    session_key will be generated automatically when needed.
    """
    # FIX: the message previously began with "DeprecationWarning: ", which
    # warnings.warn() already prepends as the category name, producing
    # "DeprecationWarning: DeprecationWarning: ...". Now consistent with
    # get_lastfm_network().
    _deprecation_warning("Create a LibreFMNetwork object instead")

    return LibreFMNetwork(
        api_key, api_secret, session_key, username, password_hash)
983
|
|
|
|
984
|
|
|
|
985
|
1 |
|
class _ShelfCacheBackend(object): |
986
|
|
|
"""Used as a backend for caching cacheable requests.""" |
987
|
1 |
|
def __init__(self, file_path=None): |
988
|
|
|
self.shelf = shelve.open(file_path) |
989
|
|
|
|
990
|
1 |
|
def __iter__(self): |
991
|
|
|
return iter(self.shelf.keys()) |
992
|
|
|
|
993
|
1 |
|
def get_xml(self, key): |
994
|
|
|
return self.shelf[key] |
995
|
|
|
|
996
|
1 |
|
def set_xml(self, key, xml_string): |
997
|
|
|
self.shelf[key] = xml_string |
998
|
|
|
|
999
|
|
|
|
1000
|
1 |
|
class _Request(object):
    """Representing an abstract web service operation.

    Builds the parameter set (including api_key/method and, when a session
    key is available, the request signature), optionally caches responses,
    and parses the XML reply.
    """

    def __init__(self, network, method_name, params=None):
        # FIX: the default used to be a shared mutable dict (params={}),
        # the classic mutable-default-argument pitfall. None is now the
        # sentinel; behavior for all existing callers is unchanged.
        if params is None:
            params = {}

        self.network = network
        self.params = {}

        # Normalize every value to text before signing/encoding.
        for key in params:
            self.params[key] = _unicode(params[key])

        (self.api_key, self.api_secret, self.session_key) = \
            network._get_ws_auth()

        self.params["api_key"] = self.api_key
        self.params["method"] = method_name

        if network.is_caching_enabled():
            self.cache = network._get_cache_backend()

        # A session key implies an authenticated call, which must be signed.
        if self.session_key:
            self.params["sk"] = self.session_key
            self.sign_it()

    def sign_it(self):
        """Sign this request."""
        # Idempotent: never overwrite an existing signature.
        if "api_sig" not in self.params:
            self.params['api_sig'] = self._get_signature()

    def _get_signature(self):
        """
        Returns a 32-character hexadecimal md5 hash of the signature string
        (sorted name+value pairs concatenated, followed by the API secret).
        """
        keys = list(self.params.keys())
        keys.sort()

        string = ""
        for name in keys:
            string += name
            string += self.params[name]

        string += self.api_secret

        return md5(string)

    def _get_cache_key(self):
        """
        The cache key is a string of concatenated sorted names and values,
        hashed with SHA-1. Auth-specific parameters are excluded so the
        same logical request hits the same cache entry.
        """
        keys = list(self.params.keys())
        keys.sort()

        cache_key = str()

        for key in keys:
            if key != "api_sig" and key != "api_key" and key != "sk":
                cache_key += key + self.params[key]

        return hashlib.sha1(cache_key.encode("utf-8")).hexdigest()

    def _get_cached_response(self):
        """Returns a file object of the cached response."""
        # Populate the cache on a miss, then always read back through it.
        if not self._is_cached():
            response = self._download_response()
            self.cache.set_xml(self._get_cache_key(), response)

        return self.cache.get_xml(self._get_cache_key())

    def _is_cached(self):
        """Returns True if the request is already in cache."""
        return self._get_cache_key() in self.cache

    def _download_response(self):
        """Returns a response body string from the server.

        Raises NetworkError on transport failure and
        MalformedResponseError if the body cannot be decoded.
        """
        if self.network.limit_rate:
            self.network._delay_call()

        # Build an application/x-www-form-urlencoded body by hand.
        data = []
        for name in self.params.keys():
            data.append('='.join((
                name, url_quote_plus(_string(self.params[name])))))
        data = '&'.join(data)

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            'Accept-Charset': 'utf-8',
            'User-Agent': "pylast" + '/' + __version__
        }

        (HOST_NAME, HOST_SUBDIR) = self.network.ws_server

        if self.network.is_proxy_enabled():
            conn = HTTPConnection(
                host=self.network._get_proxy()[0],
                port=self.network._get_proxy()[1])

            try:
                # When proxied the full URL goes in the request line.
                conn.request(
                    method='POST', url="http://" + HOST_NAME + HOST_SUBDIR,
                    body=data, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        else:
            conn = HTTPConnection(host=HOST_NAME)

            try:
                conn.request(
                    method='POST', url=HOST_SUBDIR, body=data, headers=headers)
            except Exception as e:
                raise NetworkError(self.network, e)

        try:
            response_text = _unicode(conn.getresponse().read())
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        # Replace characters that are illegal in XML so parsing can't choke.
        response_text = XML_ILLEGAL.sub("?", response_text)

        self._check_response_for_errors(response_text)
        return response_text

    def execute(self, cacheable=False):
        """Returns the XML DOM response of the POST Request from the server"""
        if self.network.is_caching_enabled() and cacheable:
            response = self._get_cached_response()
        else:
            response = self._download_response()

        return minidom.parseString(_string(response))

    def _check_response_for_errors(self, response):
        """Checks the response for errors and raises one if any exists."""
        try:
            doc = minidom.parseString(_string(response))
        except Exception as e:
            raise MalformedResponseError(self.network, e)

        e = doc.getElementsByTagName('lfm')[0]

        # status != "ok" means the service returned an <error> element
        # carrying a numeric code and a human-readable message.
        if e.getAttribute('status') != "ok":
            e = doc.getElementsByTagName('error')[0]
            status = e.getAttribute('code')
            details = e.firstChild.data.strip()
            raise WSError(self.network, status, details)
1155
|
|
|
|
1156
|
|
|
|
1157
|
1 |
|
class SessionKeyGenerator(object):
    """Methods of generating a session key:
    1) Web Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. sg = SessionKeyGenerator(network)
        c. url = sg.get_web_auth_url()
        d. Ask the user to open the url and authorize you, and wait for it.
        e. session_key = sg.get_web_auth_session_key(url)
    2) Username and Password Authentication:
        a. network = get_*_network(API_KEY, API_SECRET)
        b. username = raw_input("Please enter your username: ")
        c. password_hash = pylast.md5(raw_input("Please enter your password: ")
        d. session_key = SessionKeyGenerator(network).get_session_key(username,
            password_hash)

    A session key's lifetime is infinite, unless the user revokes the rights
    of the given API Key.

    If you create a Network object with just a API_KEY and API_SECRET and a
    username and a password_hash, a SESSION_KEY will be automatically
    generated for that network and stored in it so you don't have to do this
    manually, unless you want to.
    """

    def __init__(self, network):
        self.network = network
        # Maps auth URL -> token, so the token can be looked up again when
        # get_web_auth_session_key() is called with the same URL.
        self.web_auth_tokens = {}

    def _get_web_auth_token(self):
        """
        Retrieves a token from the network for web authentication.
        The token then has to be authorized from getAuthURL before creating
        session.
        """
        request = _Request(self.network, 'auth.getToken')

        # default action is that a request is signed only when
        # a session key is provided.
        request.sign_it()

        doc = request.execute()

        e = doc.getElementsByTagName('token')[0]
        return e.firstChild.data

    def get_web_auth_url(self):
        """
        The user must open this page, and authorize you first; then
        call get_web_auth_session_key(url) after that.
        """
        token = self._get_web_auth_token()

        url = '%(homepage)s/api/auth/?api_key=%(api)s&token=%(token)s' % \
            {"homepage": self.network.homepage,
             "api": self.network.api_key, "token": token}

        # Remember the token so the URL can later be exchanged for a key.
        self.web_auth_tokens[url] = token

        return url

    def get_web_auth_session_key(self, url):
        """
        Retrieves the session key of a web authorization process by its url.
        """
        # Idiom fix: membership test directly on the dict instead of .keys().
        if url in self.web_auth_tokens:
            token = self.web_auth_tokens[url]
        else:
            # That's going to raise a WSError of an unauthorized token when
            # the request is executed.
            token = ""

        request = _Request(self.network, 'auth.getSession', {'token': token})

        # default action is that a request is signed only when
        # a session key is provided.
        request.sign_it()

        doc = request.execute()

        return doc.getElementsByTagName('key')[0].firstChild.data

    def get_session_key(self, username, password_hash):
        """
        Retrieve a session key with a username and a md5 hash of the user's
        password.
        """
        params = {
            "username": username, "authToken": md5(username + password_hash)}
        request = _Request(self.network, "auth.getMobileSession", params)

        # default action is that a request is signed only when
        # a session key is provided.
        request.sign_it()

        doc = request.execute()

        return _extract(doc, "key")
1258
|
|
|
|
1259
|
1 |
|
# Lightweight, immutable result records returned by the various
# webservice wrappers below.

# An item (artist/album/track/tag/user) with its play/usage count.
TopItem = collections.namedtuple("TopItem", ["item", "weight"])
# An item with its similarity match score.
SimilarItem = collections.namedtuple("SimilarItem", ["item", "match"])
# A library entry with play and tag counts.
LibraryItem = collections.namedtuple(
    "LibraryItem", ["item", "playcount", "tagcount"])
# A scrobbled track with its album and playback time.
PlayedTrack = collections.namedtuple(
    "PlayedTrack", ["track", "album", "playback_date", "timestamp"])
# A loved track with the date it was loved.
LovedTrack = collections.namedtuple(
    "LovedTrack", ["track", "date", "timestamp"])
# The set of image URLs available for one Image, by size.
ImageSizes = collections.namedtuple(
    "ImageSizes", [
        "original", "large", "largesquare", "medium", "small", "extralarge"])
# An uploaded image with its metadata and an ImageSizes record.
Image = collections.namedtuple(
    "Image", [
        "title", "url", "dateadded", "format", "owner", "sizes", "votes"])
# A shoutbox message.
Shout = collections.namedtuple(
    "Shout", ["body", "author", "date"])
1275
|
|
|
|
1276
|
|
|
|
1277
|
1 |
|
def _string_output(funct):
    """Decorator coercing the wrapped callable's return value through
    _string() (native str).

    Improvements over the previous version: keyword arguments are now
    forwarded (the old wrapper accepted *args only) and the wrapped
    function's name/docstring are preserved with functools.wraps.
    """
    # Local import: functools is not imported at the top of this module.
    from functools import wraps

    @wraps(funct)
    def r(*args, **kwargs):
        return _string(funct(*args, **kwargs))

    return r
1282
|
|
|
|
1283
|
|
|
|
1284
|
1 |
|
def _pad_list(given_list, desired_length, padding=None): |
1285
|
|
|
""" |
1286
|
|
|
Pads a list to be of the desired_length. |
1287
|
|
|
""" |
1288
|
|
|
|
1289
|
|
|
while len(given_list) < desired_length: |
1290
|
|
|
given_list.append(padding) |
1291
|
|
|
|
1292
|
|
|
return given_list |
1293
|
|
|
|
1294
|
|
|
|
1295
|
1 |
|
class _BaseObject(object):
    """An abstract webservices object."""

    network = None

    def __init__(self, network, ws_prefix):
        self.network = network
        # Webservice method prefix, e.g. "artist" -> "artist.getInfo".
        self.ws_prefix = ws_prefix

    def _request(self, method_name, cacheable=False, params=None):
        """Execute a webservice call; defaults params to this object's."""
        if not params:
            params = self._get_params()

        return _Request(self.network, method_name, params).execute(cacheable)

    def _get_params(self):
        """Returns the most common set of parameters between all objects."""
        return {}

    def __hash__(self):
        # Convert any ints (or whatever) into strings
        values = map(six.text_type, self._get_params().values())

        return hash(self.network) + hash(six.text_type(type(self)) + "".join(
            list(self._get_params().keys()) + list(values)
        ).lower())

    def _extract_cdata_from_request(self, method_name, tag_name, params):
        """Return the stripped CDATA text of the first tag_name element."""
        doc = self._request(method_name, True, params)

        return doc.getElementsByTagName(
            tag_name)[0].firstChild.wholeText.strip()

    def _get_things(
            self, method, thing, thing_type, params=None, cacheable=True):
        """Returns a list of the most played thing_types by this thing."""
        doc = self._request(
            self.ws_prefix + "." + method, cacheable, params)

        seq = []
        for node in doc.getElementsByTagName(thing):
            title = _extract(node, "name")
            # Second <name> element under the node is the artist name.
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))

            seq.append(TopItem(
                thing_type(artist, title, self.network), playcount))

        return seq

    def get_top_fans(self, limit=None, cacheable=True):
        """Returns a list of the Users who played this the most.
        # Parameters:
            * limit int: Max elements.
        # For Artist/Track
        """
        doc = self._request(self.ws_prefix + '.getTopFans', cacheable)

        seq = []

        elements = doc.getElementsByTagName('user')

        for element in elements:
            if limit and len(seq) >= limit:
                break

            name = _extract(element, 'name')
            weight = _number(_extract(element, 'weight'))

            seq.append(TopItem(User(name, self.network), weight))

        return seq

    def share(self, users, message=None):
        """
        Shares this (sends out recommendations).
        Parameters:
            * users [User|str,]: A list that can contain usernames, emails,
            User objects, or all of them.
            * message str: A message to include in the recommendation
            message. Only for Artist/Event/Track.
        """
        # Last.fm currently accepts a max of 10 recipients at a time.
        # FIX: the slices were [0:9]/[9:], which batched only 9 recipients
        # despite the documented limit of 10.
        while len(users) > 10:
            section = users[0:10]
            users = users[10:]
            self.share(section, message)

        nusers = []
        for user in users:
            if isinstance(user, User):
                nusers.append(user.get_name())
            else:
                nusers.append(user)

        params = self._get_params()
        recipients = ','.join(nusers)
        params['recipient'] = recipients
        if message:
            params['message'] = message

        self._request(self.ws_prefix + '.share', False, params)

    def get_wiki_published_date(self):
        """
        Returns the date the wiki was published.
        Only for Album/Track.
        """
        return self.get_wiki("published")

    def get_wiki_summary(self):
        """
        Returns the summary of the wiki.
        Only for Album/Track.
        """
        return self.get_wiki("summary")

    def get_wiki_content(self):
        """
        Returns the content of the wiki.
        Only for Album/Track.
        """
        return self.get_wiki("content")

    def get_wiki(self, section):
        """
        Returns a section of the wiki.
        Only for Album/Track.
        section can be "content", "summary" or
            "published" (for published date)
        """
        doc = self._request(self.ws_prefix + ".getInfo", True)

        # Some items have no wiki at all; return None implicitly then.
        if len(doc.getElementsByTagName("wiki")) == 0:
            return

        node = doc.getElementsByTagName("wiki")[0]

        return _extract(node, section)

    def get_shouts(self, limit=50, cacheable=False):
        """
        Returns a sequence of Shout objects
        """
        shouts = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getShouts",
                cacheable):
            shouts.append(
                Shout(
                    _extract(node, "body"),
                    User(_extract(node, "author"), self.network),
                    _extract(node, "date")
                )
            )
        return shouts
1459
|
|
|
|
1460
|
|
|
|
1461
|
1 |
|
class _Chartable(object):
    """Common functions for classes with charts."""

    def __init__(self, ws_prefix):
        self.ws_prefix = ws_prefix  # TODO move to _BaseObject?

    def get_weekly_chart_dates(self):
        """Returns a list of From and To tuples for the available charts."""
        doc = self._request(self.ws_prefix + ".getWeeklyChartList", True)

        seq = []
        for node in doc.getElementsByTagName("chart"):
            seq.append((node.getAttribute("from"), node.getAttribute("to")))

        return seq

    def get_weekly_album_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly album charts for the week starting from the
        from_date value to the to_date value.
        Only for Group or User.
        """
        return self.get_weekly_charts("album", from_date, to_date)

    def get_weekly_artist_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly artist charts for the week starting from the
        from_date value to the to_date value.
        Only for Group, Tag or User.
        """
        return self.get_weekly_charts("artist", from_date, to_date)

    def get_weekly_track_charts(self, from_date=None, to_date=None):
        """
        Returns the weekly track charts for the week starting from the
        from_date value to the to_date value.
        Only for Group or User.
        """
        return self.get_weekly_charts("track", from_date, to_date)

    def get_weekly_charts(self, chart_kind, from_date=None, to_date=None):
        """
        Returns the weekly charts for the week starting from the
        from_date value to the to_date value.
        chart_kind should be one of "album", "artist" or "track"
        """
        method = ".getWeekly" + chart_kind.title() + "Chart"
        # FIX: was eval(chart_kind.title()). An explicit mapping gives the
        # same classes without evaluating an arbitrary string; unknown
        # kinds now raise KeyError instead of NameError.
        chart_type = {
            "album": Album,
            "artist": Artist,
            "track": Track,
        }[chart_kind.lower()]

        params = self._get_params()
        if from_date and to_date:
            params["from"] = from_date
            params["to"] = to_date

        doc = self._request(
            self.ws_prefix + method, True, params)

        seq = []
        for node in doc.getElementsByTagName(chart_kind.lower()):
            item = chart_type(
                _extract(node, "artist"), _extract(node, "name"), self.network)
            weight = _number(_extract(node, "playcount"))
            seq.append(TopItem(item, weight))

        return seq
1527
|
|
|
|
1528
|
|
|
|
1529
|
1 |
|
class _Taggable(object):
    """Common functions for classes with tags."""

    def __init__(self, ws_prefix):
        self.ws_prefix = ws_prefix  # TODO move to _BaseObject

    def add_tags(self, tags):
        """Adds one or several tags.
        * tags: A sequence of tag names or Tag objects.
        """
        for tag in tags:
            self.add_tag(tag)

    def add_tag(self, tag):
        """Adds one tag.
        * tag: a tag name or a Tag object.
        """
        if isinstance(tag, Tag):
            tag = tag.get_name()

        params = self._get_params()
        params['tags'] = tag

        self._request(self.ws_prefix + '.addTags', False, params)

    def remove_tag(self, tag):
        """Remove a user's tag from this object."""
        if isinstance(tag, Tag):
            tag = tag.get_name()

        params = self._get_params()
        params['tag'] = tag

        self._request(self.ws_prefix + '.removeTag', False, params)

    def get_tags(self):
        """Returns a list of the tags set by the user to this object."""
        # Uncacheable because it can be dynamically changed by the user.
        params = self._get_params()

        doc = self._request(self.ws_prefix + '.getTags', False, params)
        tag_names = _extract_all(doc, 'name')
        return [Tag(tag, self.network) for tag in tag_names]

    def remove_tags(self, tags):
        """Removes one or several tags from this object.
        * tags: a sequence of tag names or Tag objects.
        """
        for tag in tags:
            self.remove_tag(tag)

    def clear_tags(self):
        """Clears all the user-set tags. """
        # FIX: remove_tags() takes a sequence; the old call star-unpacked
        # the list (remove_tags(*self.get_tags())), which raised TypeError
        # whenever more than one tag was set.
        self.remove_tags(self.get_tags())

    def set_tags(self, tags):
        """Sets this object's tags to only those tags.
        * tags: a sequence of tag names or Tag objects.
        """
        # Compare case-insensitively (c_* lists) but act on the original
        # casing, so tags differing only in case are not churned.
        c_old_tags = []
        old_tags = []
        c_new_tags = []
        new_tags = []

        to_remove = []
        to_add = []

        tags_on_server = self.get_tags()

        for tag in tags_on_server:
            c_old_tags.append(tag.get_name().lower())
            old_tags.append(tag.get_name())

        for tag in tags:
            c_new_tags.append(tag.lower())
            new_tags.append(tag)

        for c_tag, tag in zip(c_old_tags, old_tags):
            if c_tag not in c_new_tags:
                to_remove.append(tag)

        for c_tag, tag in zip(c_new_tags, new_tags):
            if c_tag not in c_old_tags:
                to_add.append(tag)

        self.remove_tags(to_remove)
        self.add_tags(to_add)

    def get_top_tags(self, limit=None):
        """Returns a list of the most frequently used Tags on this object."""
        doc = self._request(self.ws_prefix + '.getTopTags', True)

        elements = doc.getElementsByTagName('tag')
        seq = []

        for element in elements:
            tag_name = _extract(element, 'name')
            tagcount = _extract(element, 'count')

            seq.append(TopItem(Tag(tag_name, self.network), tagcount))

        if limit:
            seq = seq[:limit]

        return seq
1646
|
|
|
|
1647
|
|
|
|
1648
|
1 |
|
class WSError(Exception):
    """Exception related to the Network web service"""

    def __init__(self, network, status, details):
        # Keep the originating network alongside the service's error code
        # and message.
        self.network = network
        self.status = status
        self.details = details

    @_string_output
    def __str__(self):
        return self.details

    def get_id(self):
        """Returns the exception ID, from one of the following:
            STATUS_INVALID_SERVICE = 2
            STATUS_INVALID_METHOD = 3
            STATUS_AUTH_FAILED = 4
            STATUS_INVALID_FORMAT = 5
            STATUS_INVALID_PARAMS = 6
            STATUS_INVALID_RESOURCE = 7
            STATUS_TOKEN_ERROR = 8
            STATUS_INVALID_SK = 9
            STATUS_INVALID_API_KEY = 10
            STATUS_OFFLINE = 11
            STATUS_SUBSCRIBERS_ONLY = 12
            STATUS_TOKEN_UNAUTHORIZED = 14
            STATUS_TOKEN_EXPIRED = 15
        """
        return self.status
1678
|
|
|
|
1679
|
|
|
|
1680
|
1 |
|
class MalformedResponseError(Exception):
    """Exception conveying a malformed response from Last.fm.

    Wraps the lower-level parsing error that made the response unusable.
    """

    def __init__(self, network, underlying_error):
        # Network the request was issued on, plus the original parse error.
        self.network = network
        self.underlying_error = underlying_error

    def __str__(self):
        template = "Malformed response from Last.fm. Underlying error: %s"
        return template % str(self.underlying_error)
1690
|
|
|
|
1691
|
|
|
|
1692
|
1 |
|
class NetworkError(Exception):
    """Exception conveying a problem in sending a request to Last.fm

    Wraps the transport-level error that prevented delivery.
    """

    def __init__(self, network, underlying_error):
        # Network the request targeted, plus the original transport error.
        self.network = network
        self.underlying_error = underlying_error

    def __str__(self):
        template = "NetworkError: %s"
        return template % str(self.underlying_error)
1701
|
|
|
|
1702
|
|
|
|
1703
|
1 |
|
class _Opus(_BaseObject, _Taggable):
    """An album or track."""

    # Associated Artist object.
    artist = None
    # Album or track title.
    title = None
    # Optional username used for per-user playcount lookups.
    username = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, artist, title, network, ws_prefix, username=None):
        """
        Create an opus instance.
        # Parameters:
            * artist: An artist name or an Artist object.
            * title: The album or track title.
            * ws_prefix: 'album' or 'track'
            * username: Optional username for user-specific lookups.
        """

        _BaseObject.__init__(self, network, ws_prefix)
        _Taggable.__init__(self, ws_prefix)

        # Accept either an Artist instance or a plain artist name.
        if isinstance(artist, Artist):
            self.artist = artist
        else:
            self.artist = Artist(artist, self.network)

        self.title = title
        self.username = username

    def __repr__(self):
        return "pylast.%s(%s, %s, %s)" % (
            self.ws_prefix.title(), repr(self.artist.name),
            repr(self.title), repr(self.network))

    @_string_output
    def __str__(self):
        return _unicode("%s - %s") % (
            self.get_artist().get_name(), self.get_title())

    def __eq__(self, other):
        # Equal when both artist name and title match, case-insensitively.
        if type(self) != type(other):
            return False
        a = self.get_title().lower()
        b = other.get_title().lower()
        c = self.get_artist().get_name().lower()
        d = other.get_artist().get_name().lower()
        return (a == b) and (c == d)

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        return {
            'artist': self.get_artist().get_name(),
            self.ws_prefix: self.get_title()}

    def get_artist(self):
        """Returns the associated Artist object."""

        return self.artist

    def get_title(self, properly_capitalized=False):
        """Returns the artist or track title."""
        if properly_capitalized:
            # Overwrite the stored title with the network's canonical form.
            self.title = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.title

    def get_name(self, properly_capitalized=False):
        """Returns the album or track title (alias to get_title())."""

        return self.get_title(properly_capitalized)

    def get_id(self):
        """Returns the ID on the network."""

        return _extract(
            self._request(self.ws_prefix + ".getInfo", cacheable=True), "id")

    def get_playcount(self):
        """Returns the number of plays on the network"""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username"""

        # Without a username there is nothing to ask for; returns None.
        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_listener_count(self):
        """Returns the number of listeners on the network"""

        return _number(_extract(
            self._request(
                self.ws_prefix + ".getInfo", cacheable=True), "listeners"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of the album or track.

        Returns None when the response carries no mbid element.
        """

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)

        try:
            lfm = doc.getElementsByTagName('lfm')[0]
            # Bug fix: generator objects have no .next() method on
            # Python 3 (the old gen.next() calls raised AttributeError
            # there). The next() builtin works on both Python 2 and 3.
            opus = next(self._get_children_by_tag_name(lfm, self.ws_prefix))
            mbid = next(self._get_children_by_tag_name(opus, "mbid"))
            return mbid.firstChild.nodeValue
        except StopIteration:
            return None

    def _get_children_by_tag_name(self, node, tag_name):
        # Generator over direct child *elements* of node whose tag matches
        # tag_name ('*' matches any element).
        for child in node.childNodes:
            if (child.nodeType == child.ELEMENT_NODE and
                    (tag_name == '*' or child.tagName == tag_name)):
                yield child
1827
|
|
|
|
1828
|
|
|
|
1829
|
1 |
|
class Album(_Opus):
    """An album."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        # Delegate everything to _Opus with the 'album' web-service prefix.
        super(Album, self).__init__(artist, title, network, "album", username)

    def get_release_date(self):
        """Returns the release date of the album."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract(doc, "releasedate")

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_all(doc, 'image')[size]

    def get_tracks(self):
        """Returns the list of Tracks on this album."""

        doc = self._request(self.ws_prefix + ".getInfo", cacheable=True)
        return _extract_tracks(doc, "tracks")

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album or track page on the network.
        # Parameters:
        * domain_name str: The network's language domain, one of the
          DOMAIN_* constants (DOMAIN_ENGLISH, DOMAIN_GERMAN,
          DOMAIN_SPANISH, DOMAIN_FRENCH, DOMAIN_ITALIAN, DOMAIN_POLISH,
          DOMAIN_PORTUGUESE, DOMAIN_SWEDISH, DOMAIN_TURKISH,
          DOMAIN_RUSSIAN, DOMAIN_JAPANESE, DOMAIN_CHINESE).
        """

        url_template = self.network._get_url(domain_name, self.ws_prefix)
        return url_template % {
            'artist': _url_safe(self.get_artist().get_name()),
            'album': _url_safe(self.get_title())}
1888
|
|
|
|
1889
|
|
|
|
1890
|
1 |
|
class Artist(_BaseObject, _Taggable):
    """An artist."""

    # Artist name as supplied; may be replaced by the canonical form when
    # get_name(properly_capitalized=True) is called.
    name = None
    # Optional username used for per-user playcount lookups.
    username = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network, username=None):
        """Create an artist object.
        # Parameters:
            * name str: The artist's name.
            * network: The network object to issue requests on.
            * username str: Optional username for user-specific lookups.
        """

        _BaseObject.__init__(self, network, 'artist')
        _Taggable.__init__(self, 'artist')

        self.name = name
        self.username = username

    def __repr__(self):
        return "pylast.Artist(%s, %s)" % (
            repr(self.get_name()), repr(self.network))

    def __unicode__(self):
        return six.text_type(self.get_name())

    @_string_output
    def __str__(self):
        return self.__unicode__()

    def __eq__(self, other):
        # Case-insensitive name comparison; other types never compare equal.
        if type(self) is type(other):
            return self.get_name().lower() == other.get_name().lower()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # Base parameter dict shared by all artist.* web-service calls.
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the artist.
        If properly_capitalized was asserted then the name would be downloaded
        overwriting the given one."""

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_correction(self):
        """Returns the corrected artist name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        return _extract_all(
            self._request(self.ws_prefix + ".getInfo", True), "image")[size]

    def get_playcount(self):
        """Returns the number of plays on the network."""

        return _number(_extract(
            self._request(self.ws_prefix + ".getInfo", True), "playcount"))

    def get_userplaycount(self):
        """Returns the number of plays by a given username"""

        # Without a username there is nothing to ask for; returns None.
        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        return _number(_extract(doc, "userplaycount"))

    def get_mbid(self):
        """Returns the MusicBrainz ID of this artist."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _extract(doc, "mbid")

    def get_listener_count(self):
        """Returns the number of listeners on the network."""

        # Cached on the instance after the first request.
        if hasattr(self, "listener_count"):
            return self.listener_count
        else:
            self.listener_count = _number(_extract(
                self._request(self.ws_prefix + ".getInfo", True), "listeners"))
            return self.listener_count

    def is_streamable(self):
        """Returns True if the artist is streamable."""

        return bool(_number(_extract(
            self._request(self.ws_prefix + ".getInfo", True), "streamable")))

    def get_bio(self, section, language=None):
        """
        Returns a section of the bio.
        section can be "content", "summary" or
            "published" (for published date)
        """
        if language:
            params = self._get_params()
            params["lang"] = language
        else:
            params = None

        return self._extract_cdata_from_request(
            self.ws_prefix + ".getInfo", section, params)

    def get_bio_published_date(self):
        """Returns the date on which the artist's biography was published."""
        return self.get_bio("published")

    def get_bio_summary(self, language=None):
        """Returns the summary of the artist's biography."""
        return self.get_bio("summary", language)

    def get_bio_content(self, language=None):
        """Returns the content of the artist's biography."""
        return self.get_bio("content", language)

    def get_upcoming_events(self):
        """Returns a list of the upcoming Events for this artist."""

        doc = self._request(self.ws_prefix + '.getEvents', True)

        return _extract_events_from_doc(doc, self.network)

    def get_similar(self, limit=None):
        """Returns the similar artists on the network.

        Each element is a SimilarItem pairing an Artist with its match score.
        """

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(self.ws_prefix + '.getSimilar', True, params)

        # The response interleaves <name> and <match> elements; the two
        # extracted lists are assumed to line up index-for-index.
        names = _extract_all(doc, "name")
        matches = _extract_all(doc, "match")

        artists = []
        for i in range(0, len(names)):
            artists.append(SimilarItem(
                Artist(names[i], self.network), _number(matches[i])))

        return artists

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopAlbums", "album", Album, params, cacheable)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks by this artist."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the artist page on the network.
        # Parameters:
        * domain_name: The network's language domain, one of the DOMAIN_*
          constants (DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH,
          DOMAIN_FRENCH, DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE,
          DOMAIN_SWEDISH, DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE,
          DOMAIN_CHINESE).
        """

        artist = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "artist") % {'artist': artist}

    def shout(self, message):
        """
        Post a shout
        """

        params = self._get_params()
        params["message"] = message

        self._request("artist.Shout", False, params)

    def get_band_members(self):
        """Returns a list of band members or None if unknown."""

        names = None
        doc = self._request(self.ws_prefix + ".getInfo", True)

        # Only present for bands; absent for solo artists, leaving None.
        for node in doc.getElementsByTagName("bandmembers"):
            names = _extract_all(node, "name")

        return names
2119
|
|
|
|
2120
|
|
|
|
2121
|
1 |
|
class Event(_BaseObject):
    """An event."""

    # Event identifier on the network.
    id = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, event_id, network):
        _BaseObject.__init__(self, network, 'event')

        self.id = event_id

    def __repr__(self):
        return "pylast.Event(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Event #" + str(self.get_id())

    def __eq__(self, other):
        # Events are equal when their ids match; other types never compare
        # equal.
        if type(self) is type(other):
            return self.get_id() == other.get_id()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        # Base parameter dict shared by all event.* web-service calls.
        return {'event': self.get_id()}

    def attend(self, attending_status):
        """Sets the attending status.
        * attending_status: The attending status. Possible values:
          o EVENT_ATTENDING
          o EVENT_MAYBE_ATTENDING
          o EVENT_NOT_ATTENDING
        """

        params = self._get_params()
        params['status'] = attending_status

        self._request('event.attend', False, params)

    def get_attendees(self):
        """
        Get a list of attendees (User objects) for an event
        """

        doc = self._request("event.getAttendees", False)

        users = []
        for name in _extract_all(doc, "name"):
            users.append(User(name, self.network))

        return users

    def get_id(self):
        """Returns the id of the event on the network. """

        return self.id

    def get_title(self):
        """Returns the title of the event. """

        doc = self._request("event.getInfo", True)

        return _extract(doc, "title")

    def get_headliner(self):
        """Returns the headliner (an Artist object) of the event. """

        doc = self._request("event.getInfo", True)

        return Artist(_extract(doc, "headliner"), self.network)

    def get_artists(self):
        """Returns a list of the participating Artists. """

        doc = self._request("event.getInfo", True)
        names = _extract_all(doc, "artist")

        artists = []
        for name in names:
            artists.append(Artist(name, self.network))

        return artists

    def get_venue(self):
        """Returns the venue where the event is held."""

        doc = self._request("event.getInfo", True)

        # The venue element is passed along so Venue can reuse the already
        # fetched data.
        v = doc.getElementsByTagName("venue")[0]
        venue_id = _number(_extract(v, "id"))

        return Venue(venue_id, self.network, venue_element=v)

    def get_start_date(self):
        """Returns the date when the event starts."""

        doc = self._request("event.getInfo", True)

        return _extract(doc, "startDate")

    def get_description(self):
        """Returns the description of the event. """

        doc = self._request("event.getInfo", True)

        return _extract(doc, "description")

    def get_cover_image(self, size=COVER_MEGA):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        doc = self._request("event.getInfo", True)

        return _extract_all(doc, "image")[size]

    def get_attendance_count(self):
        """Returns the number of attending people. """

        doc = self._request("event.getInfo", True)

        return _number(_extract(doc, "attendance"))

    def get_review_count(self):
        """Returns the number of available reviews for this event. """

        doc = self._request("event.getInfo", True)

        return _number(_extract(doc, "reviews"))

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the event page on the network.
        * domain_name: The network's language domain, one of the DOMAIN_*
          constants (DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH,
          DOMAIN_FRENCH, DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE,
          DOMAIN_SWEDISH, DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE,
          DOMAIN_CHINESE).
        """

        return self.network._get_url(
            domain_name, "event") % {'id': self.get_id()}

    def shout(self, message):
        """
        Post a shout
        """

        params = self._get_params()
        params["message"] = message

        self._request("event.Shout", False, params)
2291
|
|
|
|
2292
|
|
|
|
2293
|
1 |
|
class Country(_BaseObject):
    """A country at Last.fm."""

    # Country name as supplied to the constructor.
    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, "geo")

        self.name = name

    def __repr__(self):
        return "pylast.Country(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Case-insensitive name comparison.
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Bug fix: the old implementation compared names case-sensitively,
        # making __ne__ inconsistent with the case-insensitive __eq__
        # (two Country objects could be both == and !=). Delegating to
        # __eq__ matches Artist, Event and _Opus in this module.
        return not self.__eq__(other)

    def _get_params(self):  # TODO can move to _BaseObject
        return {'country': self.get_name()}

    def _get_name_from_code(self, alpha2code):
        # TODO: Have this function lookup the alpha-2 code and return the
        # country name.

        return alpha2code

    def get_name(self):
        """Returns the country name. """

        return self.name

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request('geo.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a sequence of the most played tracks"""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the country page on the network.
        * domain_name: The network's language domain, one of the DOMAIN_*
          constants (DOMAIN_ENGLISH, DOMAIN_GERMAN, DOMAIN_SPANISH,
          DOMAIN_FRENCH, DOMAIN_ITALIAN, DOMAIN_POLISH, DOMAIN_PORTUGUESE,
          DOMAIN_SWEDISH, DOMAIN_TURKISH, DOMAIN_RUSSIAN, DOMAIN_JAPANESE,
          DOMAIN_CHINESE).
        """

        country_name = _url_safe(self.get_name())

        return self.network._get_url(
            domain_name, "country") % {'country_name': country_name}
2372
|
|
|
|
2373
|
|
|
|
2374
|
1 |
|
class Metro(_BaseObject):
    """A metro at Last.fm."""

    # Metro (city/region) name.
    name = None
    # Name of the country this metro belongs to.
    country = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, country, network):
        _BaseObject.__init__(self, network, None)

        self.name = name
        self.country = country

    def __repr__(self):
        return "pylast.Metro(%s, %s, %s)" % (
            repr(self.name), repr(self.country), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name() + ", " + self.get_country()

    def __eq__(self, other):
        # Case-insensitive comparison of both name and country.
        return (self.get_name().lower() == other.get_name().lower() and
                self.get_country().lower() == other.get_country().lower())

    def __ne__(self, other):
        # NOTE(review): the name comparison here is case-sensitive while
        # __eq__ compares names case-insensitively, so `a != b` is not
        # always equivalent to `not (a == b)` — confirm this is intended.
        return (self.get_name() != other.get_name() or
                self.get_country().lower() != other.get_country().lower())

    def _get_params(self):
        # Base parameter dict shared by the geo.getMetro* web-service calls.
        return {'metro': self.get_name(), 'country': self.get_country()}

    def get_name(self):
        """Returns the metro name."""

        return self.name

    def get_country(self):
        """Returns the metro country."""

        return self.country

    def _get_chart(
            self, method, tag="artist", limit=None, from_date=None,
            to_date=None, cacheable=True):
        """Internal helper for getting geo charts.

        Returns a list of TopItem objects weighted by listener count, or
        None when tag is neither "artist" nor "track".
        """
        params = self._get_params()
        if limit:
            params["limit"] = limit
        # A date range is applied only when both bounds are supplied; a
        # single bound is silently ignored.
        if from_date and to_date:
            params["from"] = from_date
            params["to"] = to_date

        doc = self._request(method, cacheable, params)

        seq = []
        for node in doc.getElementsByTagName(tag):
            if tag == "artist":
                item = Artist(_extract(node, "name"), self.network)
            elif tag == "track":
                title = _extract(node, "name")
                artist = _extract_element_tree(node).get('artist')['name']
                item = Track(artist, title, self.network)
            else:
                # Unsupported tag: bail out with None (not an empty list).
                return None
            weight = _number(_extract(node, "listeners"))
            seq.append(TopItem(item, weight))

        return seq

    def get_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of artists for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_hype_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of hyped (up and coming) artists for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroHypeArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_unique_artist_chart(
            self, tag="artist", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of the artists which make that metro unique.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroUniqueArtistChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of tracks for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroTrackChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)

    def get_hype_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of hyped (up and coming) tracks for a metro.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroHypeTrackChart", tag=tag,
            limit=limit, from_date=from_date, to_date=to_date,
            cacheable=cacheable)

    def get_unique_track_chart(
            self, tag="track", limit=None, from_date=None, to_date=None,
            cacheable=True):
        """Get a chart of the tracks which make that metro unique.
        Parameters:
        from_date (Optional) : Beginning timestamp of the weekly range
            requested
        to_date (Optional) : Ending timestamp of the weekly range requested
        limit (Optional) : The number of results to fetch per page.
            Defaults to 50.
        """
        return self._get_chart(
            "geo.getMetroUniqueTrackChart", tag=tag, limit=limit,
            from_date=from_date, to_date=to_date, cacheable=cacheable)
2535
|
|
|
|
2536
|
|
|
|
2537
|
1 |
|
class Library(_BaseObject):
    """A user's Last.fm library."""

    # Owning User object, set in __init__.
    user = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, network):
        """Create a Library for `user` (a User object or username string)."""
        _BaseObject.__init__(self, network, 'library')

        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        # Pagination cursors used by the get_* sequence methods.
        self._albums_index = 0
        self._artists_index = 0
        self._tracks_index = 0

    def __repr__(self):
        return "pylast.Library(%s, %s)" % (repr(self.user), repr(self.network))

    @_string_output
    def __str__(self):
        return repr(self.get_user()) + "'s Library"

    def _get_params(self):
        return {'user': self.user.get_name()}

    def get_user(self):
        """Returns the user who owns this library."""

        return self.user

    def add_album(self, album):
        """Add an album to this library."""

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        # Use ws_prefix for consistency with the other library methods
        # (was previously hard-coded as "library.addAlbum").
        self._request(self.ws_prefix + ".addAlbum", False, params)

    def remove_album(self, album):
        """Remove an album from this library."""

        params = self._get_params()
        params["artist"] = album.get_artist().get_name()
        params["album"] = album.get_name()

        self._request(self.ws_prefix + ".removeAlbum", False, params)

    def add_artist(self, artist):
        """Add an artist (Artist object or name string) to this library."""

        params = self._get_params()
        # Checking for an Artist object (instead of `type(artist) == str`)
        # also accepts unicode names on Python 2 and Artist subclasses.
        if isinstance(artist, Artist):
            params["artist"] = artist.get_name()
        else:
            params["artist"] = artist

        self._request(self.ws_prefix + ".addArtist", False, params)

    def remove_artist(self, artist):
        """Remove an artist (Artist object or name string) from this library."""

        params = self._get_params()
        # See add_artist: isinstance handles unicode names and subclasses.
        if isinstance(artist, Artist):
            params["artist"] = artist.get_name()
        else:
            params["artist"] = artist

        self._request(self.ws_prefix + ".removeArtist", False, params)

    def add_track(self, track):
        """Add a track to this library."""

        params = self._get_params()
        params["track"] = track.get_title()

        self._request(self.ws_prefix + ".addTrack", False, params)

    def get_albums(self, artist=None, limit=50, cacheable=True):
        """
        Returns a sequence of LibraryItem namedtuples, each wrapping an
        Album with its playcount and tagcount.
        If no artist is specified, it will return all, sorted by decreasing
        play count.
        If limit==None it will return all (may take a while)
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getAlbums",
                cacheable,
                params):
            name = _extract(node, "name")
            # Second <name> element in the node is the artist name.
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Album(artist, name, self.network), playcount, tagcount))

        return seq

    def get_artists(self, limit=50, cacheable=True):
        """
        Returns a sequence of LibraryItem namedtuples, each wrapping an
        Artist with its playcount and tagcount.
        if limit==None it will return all (may take a while)
        """

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getArtists",
                cacheable):
            name = _extract(node, "name")

            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Artist(name, self.network), playcount, tagcount))

        return seq

    def get_tracks(self, artist=None, album=None, limit=50, cacheable=True):
        """
        Returns a sequence of LibraryItem namedtuples, each wrapping a
        Track with its playcount and tagcount.
        If limit==None it will return all (may take a while)
        """

        params = self._get_params()
        if artist:
            params["artist"] = artist
        if album:
            params["album"] = album

        seq = []
        for node in _collect_nodes(
                limit,
                self,
                self.ws_prefix + ".getTracks",
                cacheable,
                params):
            name = _extract(node, "name")
            # Second <name> element in the node is the artist name.
            artist = _extract(node, "name", 1)
            playcount = _number(_extract(node, "playcount"))
            tagcount = _number(_extract(node, "tagcount"))

            seq.append(LibraryItem(
                Track(artist, name, self.network), playcount, tagcount))

        return seq

    def remove_scrobble(self, artist, title, timestamp):
        """Remove a scrobble from a user's Last.fm library. Parameters:
            artist (Required) : The artist that composed the track
            title (Required) : The name of the track
            timestamp (Required) : The unix timestamp of the scrobble
                that you wish to remove
        """

        params = self._get_params()
        params["artist"] = artist
        params["track"] = title
        params["timestamp"] = timestamp

        self._request(self.ws_prefix + ".removeScrobble", False, params)
2713
|
|
|
|
2714
|
|
|
|
2715
|
1 |
|
class Playlist(_BaseObject):
    """A Last.fm user playlist."""

    # Playlist ID and owning User, set in __init__.
    id = None
    user = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, user, playlist_id, network):
        """Create a Playlist from its owner (User or username) and its ID."""
        _BaseObject.__init__(self, network, "playlist")

        # Accept either a User object or a bare username string.
        if isinstance(user, User):
            self.user = user
        else:
            self.user = User(user, self.network)

        self.id = playlist_id

    @_string_output
    def __str__(self):
        return repr(self.user) + "'s playlist # " + repr(self.id)

    def _get_info_node(self):
        """
        Returns the node from user.getPlaylists where this playlist's info is.
        """

        doc = self._request("user.getPlaylists", True)

        wanted_id = str(self.get_id())
        for playlist_node in doc.getElementsByTagName("playlist"):
            if _extract(playlist_node, "id") == wanted_id:
                return playlist_node

    def _get_params(self):
        return {'user': self.user.get_name(), 'playlistID': self.get_id()}

    def get_id(self):
        """Returns the playlist ID."""

        return self.id

    def get_user(self):
        """Returns the owner user of this playlist."""

        return self.user

    def get_tracks(self):
        """Returns a list of the tracks on this user playlist."""

        playlist_uri = _unicode('lastfm://playlist/%s') % self.get_id()

        return XSPF(playlist_uri, self.network).get_tracks()

    def add_track(self, track):
        """Adds a Track to this Playlist."""

        params = self._get_params()
        params['artist'] = track.get_artist().get_name()
        params['track'] = track.get_title()

        self._request('playlist.addTrack', False, params)

    def get_title(self):
        """Returns the title of this playlist."""

        return _extract(self._get_info_node(), "title")

    def get_creation_date(self):
        """Returns the creation date of this playlist."""

        return _extract(self._get_info_node(), "date")

    def get_size(self):
        """Returns the number of tracks in this playlist."""

        return _number(_extract(self._get_info_node(), "size"))

    def get_description(self):
        """Returns the description of this playlist."""

        return _extract(self._get_info_node(), "description")

    def get_duration(self):
        """Returns the duration of this playlist in milliseconds."""

        return _number(_extract(self._get_info_node(), "duration"))

    def is_streamable(self):
        """
        Returns True if the playlist is streamable.
        For a playlist to be streamable, it needs at least 45 tracks by 15
        different artists."""

        return _extract(self._get_info_node(), "streamable") == '1'

    def has_track(self, track):
        """Checks to see if track is already in the playlist.
        * track: Any Track object.
        """

        return track in self.get_tracks()

    def get_cover_image(self, size=COVER_EXTRA_LARGE):
        """
        Returns a uri to the cover image
        size can be one of:
            COVER_MEGA
            COVER_EXTRA_LARGE
            COVER_LARGE
            COVER_MEDIUM
            COVER_SMALL
        """

        return _extract(self._get_info_node(), "image")[size]

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the playlist on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        # The localised URL reuses everything after the last "/" of the
        # English URL.
        english_url = _extract(self._get_info_node(), "url")
        appendix = english_url[english_url.rfind("/") + 1:]

        return self.network._get_url(domain_name, "playlist") % {
            'appendix': appendix, "user": self.get_user().get_name()}
2855
|
|
|
|
2856
|
|
|
|
2857
|
1 |
|
class Tag(_BaseObject, _Chartable):
    """A Last.fm object tag."""

    # Tag name, set in __init__.
    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'tag')
        _Chartable.__init__(self, 'tag')

        self.name = name

    def __repr__(self):
        return "pylast.Tag(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        # Guard against non-Tag operands (e.g. None), mirroring
        # User.__eq__; previously this raised AttributeError instead of
        # returning False. Tag names compare case-insensitively.
        if isinstance(other, Tag):
            return self.get_name().lower() == other.get_name().lower()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self, properly_capitalized=False):
        """Returns the name of the tag. """

        if properly_capitalized:
            self.name = _extract(
                self._request(self.ws_prefix + ".getInfo", True), "name")

        return self.name

    def get_similar(self):
        """Returns the tags similar to this one, ordered by similarity. """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        names = _extract_all(doc, 'name')
        for name in names:
            seq.append(Tag(name, self.network))

        return seq

    def get_top_albums(self, limit=None, cacheable=True):
        """Returns a list of the top albums."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopAlbums', cacheable, params)

        return _extract_top_albums(doc, self.network)

    def get_top_tracks(self, limit=None, cacheable=True):
        """Returns a list of the most played Tracks for this tag."""
        params = self._get_params()
        if limit:
            params['limit'] = limit

        return self._get_things(
            "getTopTracks", "track", Track, params, cacheable)

    def get_top_artists(self, limit=None, cacheable=True):
        """Returns a sequence of the most played artists."""

        params = self._get_params()
        if limit:
            params['limit'] = limit

        doc = self._request(
            self.ws_prefix + '.getTopArtists', cacheable, params)

        return _extract_top_artists(doc, self.network)

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the tag page on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "tag") % {'name': name}
2959
|
|
|
|
2960
|
|
|
|
2961
|
1 |
|
class Track(_Opus):
    """A Last.fm track."""

    __hash__ = _Opus.__hash__

    def __init__(self, artist, title, network, username=None):
        super(Track, self).__init__(artist, title, network, "track", username)

    def get_correction(self):
        """Returns the corrected track name."""

        return _extract(
            self._request(self.ws_prefix + ".getCorrection"), "name")

    def get_duration(self):
        """Returns the track duration."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        return _number(_extract(doc, "duration"))

    def get_userloved(self):
        """Whether the user loved this track.

        Returns None when no username is associated with this track.
        """

        if not self.username:
            return

        params = self._get_params()
        params['username'] = self.username

        doc = self._request(self.ws_prefix + ".getInfo", True, params)
        loved = _number(_extract(doc, "userloved"))
        return bool(loved)

    def is_streamable(self):
        """Returns True if the track is available at Last.fm."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        return _extract(doc, "streamable") == "1"

    def is_fulltrack_available(self):
        """Returns True if the fulltrack is available for streaming."""

        doc = self._request(self.ws_prefix + ".getInfo", True)
        return doc.getElementsByTagName(
            "streamable")[0].getAttribute("fulltrack") == "1"

    def get_album(self):
        """Returns the album object of this track, or None if not known."""

        doc = self._request(self.ws_prefix + ".getInfo", True)

        albums = doc.getElementsByTagName("album")

        if not albums:
            return

        # Reuse the node list rather than querying the document a second
        # time (the original called getElementsByTagName twice).
        node = albums[0]
        return Album(
            _extract(node, "artist"), _extract(node, "title"), self.network)

    def love(self):
        """Adds the track to the user's loved tracks. """

        self._request(self.ws_prefix + '.love')

    def unlove(self):
        """Remove the track from the user's loved tracks. """

        self._request(self.ws_prefix + '.unlove')

    def ban(self):
        """Ban this track from ever playing on the radio. """

        self._request(self.ws_prefix + '.ban')

    def get_similar(self):
        """
        Returns similar tracks for this track on the network,
        based on listening data.
        """

        doc = self._request(self.ws_prefix + '.getSimilar', True)

        seq = []
        for node in doc.getElementsByTagName(self.ws_prefix):
            title = _extract(node, 'name')
            # Second <name> element in the node is the artist name.
            artist = _extract(node, 'name', 1)
            match = _number(_extract(node, "match"))

            seq.append(SimilarItem(Track(artist, title, self.network), match))

        return seq

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the URL of the album or track page on the network.
        # Parameters:
        * domain_name str: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        artist = _url_safe(self.get_artist().get_name())
        title = _url_safe(self.get_title())

        return self.network._get_url(
            domain_name, self.ws_prefix) % {
                'artist': artist, 'title': title}
3079
|
|
|
|
3080
|
|
|
|
3081
|
1 |
|
class Group(_BaseObject, _Chartable):
    """A Last.fm group."""

    # Group name, set in __init__.
    name = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, name, network):
        _BaseObject.__init__(self, network, 'group')
        _Chartable.__init__(self, 'group')

        self.name = name

    def __repr__(self):
        return "pylast.Group(%s, %s)" % (repr(self.name), repr(self.network))

    @_string_output
    def __str__(self):
        return self.get_name()

    def __eq__(self, other):
        return self.get_name().lower() == other.get_name().lower()

    def __ne__(self, other):
        # Fixed: compare case-insensitively, mirroring __eq__. Previously
        # this compared case-sensitively, so two groups differing only in
        # case were simultaneously __eq__ and __ne__.
        return self.get_name().lower() != other.get_name().lower()

    def _get_params(self):
        return {self.ws_prefix: self.get_name()}

    def get_name(self):
        """Returns the group name. """
        return self.name

    def get_url(self, domain_name=DOMAIN_ENGLISH):
        """Returns the url of the group page on the network.
        * domain_name: The network's language domain. Possible values:
            o DOMAIN_ENGLISH
            o DOMAIN_GERMAN
            o DOMAIN_SPANISH
            o DOMAIN_FRENCH
            o DOMAIN_ITALIAN
            o DOMAIN_POLISH
            o DOMAIN_PORTUGUESE
            o DOMAIN_SWEDISH
            o DOMAIN_TURKISH
            o DOMAIN_RUSSIAN
            o DOMAIN_JAPANESE
            o DOMAIN_CHINESE
        """

        name = _url_safe(self.get_name())

        return self.network._get_url(domain_name, "group") % {'name': name}

    def get_members(self, limit=50, cacheable=False):
        """
        Returns a sequence of User objects
        if limit==None it will return all
        """

        nodes = _collect_nodes(
            limit, self, self.ws_prefix + ".getMembers", cacheable)

        users = []

        for node in nodes:
            users.append(User(_extract(node, "name"), self.network))

        return users
3150
|
|
|
|
3151
|
|
|
|
3152
|
1 |
|
class XSPF(_BaseObject):
    # Fixed malformed docstring quoting (was `"..."""`, an accidental
    # implicit concatenation of two string literals).
    """A Last.fm XSPF playlist."""

    # Playlist URI, set in __init__.
    uri = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, uri, network):
        _BaseObject.__init__(self, network, None)

        self.uri = uri

    def _get_params(self):
        return {'playlistURL': self.get_uri()}

    @_string_output
    def __str__(self):
        return self.get_uri()

    def __eq__(self, other):
        return self.get_uri() == other.get_uri()

    def __ne__(self, other):
        return self.get_uri() != other.get_uri()

    def get_uri(self):
        """Returns the Last.fm playlist URI. """

        return self.uri

    def get_tracks(self):
        """Returns the tracks on this playlist."""

        doc = self._request('playlist.fetch', True)

        seq = []
        for node in doc.getElementsByTagName('track'):
            title = _extract(node, 'title')
            artist = _extract(node, 'creator')

            seq.append(Track(artist, title, self.network))

        return seq
3195
|
|
|
|
3196
|
|
|
|
3197
|
1 |
|
class User(_BaseObject, _Chartable): |
3198
|
|
|
"""A Last.fm user.""" |
3199
|
|
|
|
3200
|
1 |
|
name = None |
3201
|
|
|
|
3202
|
1 |
|
__hash__ = _BaseObject.__hash__ |
3203
|
|
|
|
3204
|
1 |
|
    def __init__(self, user_name, network):
        # Initialise as a 'user'-prefixed web-service object that can also
        # produce weekly charts via the _Chartable mixin.
        _BaseObject.__init__(self, network, 'user')
        _Chartable.__init__(self, 'user')

        self.name = user_name

        # Pagination cursors for the corresponding get_* sequence methods.
        self._past_events_index = 0
        self._recommended_events_index = 0
        self._recommended_artists_index = 0
3213
|
|
|
|
3214
|
1 |
|
def __repr__(self): |
3215
|
|
|
return "pylast.User(%s, %s)" % (repr(self.name), repr(self.network)) |
3216
|
|
|
|
3217
|
1 |
|
    @_string_output
    def __str__(self):
        # The user's name; _string_output handles encoding on Python 2.
        return self.get_name()
3220
|
|
|
|
3221
|
1 |
|
def __eq__(self, another): |
3222
|
|
|
if isinstance(another, User): |
3223
|
|
|
return self.get_name() == another.get_name() |
3224
|
|
|
else: |
3225
|
|
|
return False |
3226
|
|
|
|
3227
|
1 |
|
def __ne__(self, another): |
3228
|
|
|
if isinstance(another, User): |
3229
|
|
|
return self.get_name() != another.get_name() |
3230
|
|
|
else: |
3231
|
|
|
return True |
3232
|
|
|
|
3233
|
1 |
|
    def _get_params(self):
        # Base request parameters: {'user': <username>}.
        return {self.ws_prefix: self.get_name()}
3235
|
|
|
|
3236
|
1 |
|
def get_name(self, properly_capitalized=False): |
3237
|
|
|
"""Returns the user name.""" |
3238
|
|
|
|
3239
|
|
|
if properly_capitalized: |
3240
|
|
|
self.name = _extract( |
3241
|
|
|
self._request(self.ws_prefix + ".getInfo", True), "name") |
3242
|
|
|
|
3243
|
|
|
return self.name |
3244
|
|
|
|
3245
|
1 |
|
def get_upcoming_events(self): |
3246
|
|
|
"""Returns all the upcoming events for this user.""" |
3247
|
|
|
|
3248
|
|
|
doc = self._request(self.ws_prefix + '.getEvents', True) |
3249
|
|
|
|
3250
|
|
|
return _extract_events_from_doc(doc, self.network) |
3251
|
|
|
|
3252
|
1 |
|
def get_artist_tracks(self, artist, cacheable=False): |
3253
|
|
|
""" |
3254
|
|
|
Get a list of tracks by a given artist scrobbled by this user, |
3255
|
|
|
including scrobble time. |
3256
|
|
|
""" |
3257
|
|
|
# Not implemented: |
3258
|
|
|
# "Can be limited to specific timeranges, defaults to all time." |
3259
|
|
|
|
3260
|
|
|
params = self._get_params() |
3261
|
|
|
params['artist'] = artist |
3262
|
|
|
|
3263
|
|
|
seq = [] |
3264
|
|
|
for track in _collect_nodes( |
3265
|
|
|
None, |
3266
|
|
|
self, |
3267
|
|
|
self.ws_prefix + ".getArtistTracks", |
3268
|
|
|
cacheable, |
3269
|
|
|
params): |
3270
|
|
|
title = _extract(track, "name") |
3271
|
|
|
artist = _extract(track, "artist") |
3272
|
|
|
date = _extract(track, "date") |
3273
|
|
|
album = _extract(track, "album") |
3274
|
|
|
timestamp = track.getElementsByTagName( |
3275
|
|
|
"date")[0].getAttribute("uts") |
3276
|
|
|
|
3277
|
|
|
seq.append(PlayedTrack( |
3278
|
|
|
Track(artist, title, self.network), album, date, timestamp)) |
3279
|
|
|
|
3280
|
|
|
return seq |
3281
|
|
|
|
3282
|
1 |
|
def get_friends(self, limit=50, cacheable=False): |
3283
|
|
|
"""Returns a list of the user's friends. """ |
3284
|
|
|
|
3285
|
|
|
seq = [] |
3286
|
|
|
for node in _collect_nodes( |
3287
|
|
|
limit, |
3288
|
|
|
self, |
3289
|
|
|
self.ws_prefix + ".getFriends", |
3290
|
|
|
cacheable): |
3291
|
|
|
seq.append(User(_extract(node, "name"), self.network)) |
3292
|
|
|
|
3293
|
|
|
return seq |
3294
|
|
|
|
3295
|
1 |
|
def get_loved_tracks(self, limit=50, cacheable=True): |
3296
|
|
|
""" |
3297
|
|
|
Returns this user's loved track as a sequence of LovedTrack objects in |
3298
|
|
|
reverse order of their timestamp, all the way back to the first track. |
3299
|
|
|
|
3300
|
|
|
If limit==None, it will try to pull all the available data. |
3301
|
|
|
|
3302
|
|
|
This method uses caching. Enable caching only if you're pulling a |
3303
|
|
|
large amount of data. |
3304
|
|
|
|
3305
|
|
|
Use extract_items() with the return of this function to |
3306
|
|
|
get only a sequence of Track objects with no playback dates. |
3307
|
|
|
""" |
3308
|
|
|
|
3309
|
|
|
params = self._get_params() |
3310
|
|
|
if limit: |
3311
|
|
|
params['limit'] = limit |
3312
|
|
|
|
3313
|
|
|
seq = [] |
3314
|
|
|
for track in _collect_nodes( |
3315
|
|
|
limit, |
3316
|
|
|
self, |
3317
|
|
|
self.ws_prefix + ".getLovedTracks", |
3318
|
|
|
cacheable, |
3319
|
|
|
params): |
3320
|
|
|
title = _extract(track, "name") |
3321
|
|
|
artist = _extract(track, "name", 1) |
3322
|
|
|
date = _extract(track, "date") |
3323
|
|
|
timestamp = track.getElementsByTagName( |
3324
|
|
|
"date")[0].getAttribute("uts") |
3325
|
|
|
|
3326
|
|
|
seq.append(LovedTrack( |
3327
|
|
|
Track(artist, title, self.network), date, timestamp)) |
3328
|
|
|
|
3329
|
|
|
return seq |
3330
|
|
|
|
3331
|
1 |
|
def get_neighbours(self, limit=50, cacheable=True): |
3332
|
|
|
"""Returns a list of the user's friends.""" |
3333
|
|
|
|
3334
|
|
|
params = self._get_params() |
3335
|
|
|
if limit: |
3336
|
|
|
params['limit'] = limit |
3337
|
|
|
|
3338
|
|
|
doc = self._request( |
3339
|
|
|
self.ws_prefix + '.getNeighbours', cacheable, params) |
3340
|
|
|
|
3341
|
|
|
seq = [] |
3342
|
|
|
names = _extract_all(doc, 'name') |
3343
|
|
|
|
3344
|
|
|
for name in names: |
3345
|
|
|
seq.append(User(name, self.network)) |
3346
|
|
|
|
3347
|
|
|
return seq |
3348
|
|
|
|
3349
|
1 |
|
def get_past_events(self, limit=50, cacheable=False): |
3350
|
|
|
""" |
3351
|
|
|
Returns a sequence of Event objects |
3352
|
|
|
if limit==None it will return all |
3353
|
|
|
""" |
3354
|
|
|
|
3355
|
|
|
seq = [] |
3356
|
|
|
for node in _collect_nodes( |
3357
|
|
|
limit, |
3358
|
|
|
self, |
3359
|
|
|
self.ws_prefix + ".getPastEvents", |
3360
|
|
|
cacheable): |
3361
|
|
|
seq.append(Event(_extract(node, "id"), self.network)) |
3362
|
|
|
|
3363
|
|
|
return seq |
3364
|
|
|
|
3365
|
1 |
|
def get_playlists(self): |
3366
|
|
|
"""Returns a list of Playlists that this user owns.""" |
3367
|
|
|
|
3368
|
|
|
doc = self._request(self.ws_prefix + ".getPlaylists", True) |
3369
|
|
|
|
3370
|
|
|
playlists = [] |
3371
|
|
|
for playlist_id in _extract_all(doc, "id"): |
3372
|
|
|
playlists.append( |
3373
|
|
|
Playlist(self.get_name(), playlist_id, self.network)) |
3374
|
|
|
|
3375
|
|
|
return playlists |
3376
|
|
|
|
3377
|
1 |
|
def get_now_playing(self): |
3378
|
|
|
""" |
3379
|
|
|
Returns the currently playing track, or None if nothing is playing. |
3380
|
|
|
""" |
3381
|
|
|
|
3382
|
|
|
params = self._get_params() |
3383
|
|
|
params['limit'] = '1' |
3384
|
|
|
|
3385
|
|
|
doc = self._request(self.ws_prefix + '.getRecentTracks', False, params) |
3386
|
|
|
|
3387
|
|
|
tracks = doc.getElementsByTagName('track') |
3388
|
|
|
|
3389
|
|
|
if len(tracks) == 0: |
3390
|
|
|
return None |
3391
|
|
|
|
3392
|
|
|
e = tracks[0] |
3393
|
|
|
|
3394
|
|
|
if not e.hasAttribute('nowplaying'): |
3395
|
|
|
return None |
3396
|
|
|
|
3397
|
|
|
artist = _extract(e, 'artist') |
3398
|
|
|
title = _extract(e, 'name') |
3399
|
|
|
|
3400
|
|
|
return Track(artist, title, self.network, self.name) |
3401
|
|
|
|
3402
|
1 |
|
def get_recent_tracks(self, limit=10, cacheable=True, |
3403
|
|
|
time_from=None, time_to=None): |
3404
|
|
|
""" |
3405
|
|
|
Returns this user's played track as a sequence of PlayedTrack objects |
3406
|
|
|
in reverse order of playtime, all the way back to the first track. |
3407
|
|
|
|
3408
|
|
|
Parameters: |
3409
|
|
|
limit : If None, it will try to pull all the available data. |
3410
|
|
|
from (Optional) : Beginning timestamp of a range - only display |
3411
|
|
|
scrobbles after this time, in UNIX timestamp format (integer |
3412
|
|
|
number of seconds since 00:00:00, January 1st 1970 UTC). This |
3413
|
|
|
must be in the UTC time zone. |
3414
|
|
|
to (Optional) : End timestamp of a range - only display scrobbles |
3415
|
|
|
before this time, in UNIX timestamp format (integer number of |
3416
|
|
|
seconds since 00:00:00, January 1st 1970 UTC). This must be in |
3417
|
|
|
the UTC time zone. |
3418
|
|
|
|
3419
|
|
|
This method uses caching. Enable caching only if you're pulling a |
3420
|
|
|
large amount of data. |
3421
|
|
|
|
3422
|
|
|
Use extract_items() with the return of this function to |
3423
|
|
|
get only a sequence of Track objects with no playback dates. |
3424
|
|
|
""" |
3425
|
|
|
|
3426
|
|
|
params = self._get_params() |
3427
|
|
|
if limit: |
3428
|
|
|
params['limit'] = limit |
3429
|
|
|
if time_from: |
3430
|
|
|
params['from'] = time_from |
3431
|
|
|
if time_to: |
3432
|
|
|
params['to'] = time_to |
3433
|
|
|
|
3434
|
|
|
seq = [] |
3435
|
|
|
for track in _collect_nodes( |
3436
|
|
|
limit, |
3437
|
|
|
self, |
3438
|
|
|
self.ws_prefix + ".getRecentTracks", |
3439
|
|
|
cacheable, |
3440
|
|
|
params): |
3441
|
|
|
|
3442
|
|
|
if track.hasAttribute('nowplaying'): |
3443
|
|
|
continue # to prevent the now playing track from sneaking in |
3444
|
|
|
|
3445
|
|
|
title = _extract(track, "name") |
3446
|
|
|
artist = _extract(track, "artist") |
3447
|
|
|
date = _extract(track, "date") |
3448
|
|
|
album = _extract(track, "album") |
3449
|
|
|
timestamp = track.getElementsByTagName( |
3450
|
|
|
"date")[0].getAttribute("uts") |
3451
|
|
|
|
3452
|
|
|
seq.append(PlayedTrack( |
3453
|
|
|
Track(artist, title, self.network), album, date, timestamp)) |
3454
|
|
|
|
3455
|
|
|
return seq |
3456
|
|
|
|
3457
|
1 |
|
def get_id(self): |
3458
|
|
|
"""Returns the user ID.""" |
3459
|
|
|
|
3460
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3461
|
|
|
|
3462
|
|
|
return _extract(doc, "id") |
3463
|
|
|
|
3464
|
1 |
|
def get_language(self): |
3465
|
|
|
"""Returns the language code of the language used by the user.""" |
3466
|
|
|
|
3467
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3468
|
|
|
|
3469
|
|
|
return _extract(doc, "lang") |
3470
|
|
|
|
3471
|
1 |
|
def get_country(self): |
3472
|
|
|
"""Returns the name of the country of the user.""" |
3473
|
|
|
|
3474
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3475
|
|
|
|
3476
|
|
|
country = _extract(doc, "country") |
3477
|
|
|
|
3478
|
|
|
if country is None: |
3479
|
|
|
return None |
3480
|
|
|
else: |
3481
|
|
|
return Country(country, self.network) |
3482
|
|
|
|
3483
|
1 |
|
def get_age(self): |
3484
|
|
|
"""Returns the user's age.""" |
3485
|
|
|
|
3486
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3487
|
|
|
|
3488
|
|
|
return _number(_extract(doc, "age")) |
3489
|
|
|
|
3490
|
1 |
|
def get_gender(self): |
3491
|
|
|
"""Returns the user's gender. Either USER_MALE or USER_FEMALE.""" |
3492
|
|
|
|
3493
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3494
|
|
|
|
3495
|
|
|
value = _extract(doc, "gender") |
3496
|
|
|
|
3497
|
|
|
if value == 'm': |
3498
|
|
|
return USER_MALE |
3499
|
|
|
elif value == 'f': |
3500
|
|
|
return USER_FEMALE |
3501
|
|
|
|
3502
|
|
|
return None |
3503
|
|
|
|
3504
|
1 |
|
def is_subscriber(self): |
3505
|
|
|
"""Returns whether the user is a subscriber or not. True or False.""" |
3506
|
|
|
|
3507
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3508
|
|
|
|
3509
|
|
|
return _extract(doc, "subscriber") == "1" |
3510
|
|
|
|
3511
|
1 |
|
def get_playcount(self): |
3512
|
|
|
"""Returns the user's playcount so far.""" |
3513
|
|
|
|
3514
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3515
|
|
|
|
3516
|
|
|
return _number(_extract(doc, "playcount")) |
3517
|
|
|
|
3518
|
1 |
|
def get_registered(self): |
3519
|
|
|
"""Returns the user's registration date.""" |
3520
|
|
|
|
3521
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3522
|
|
|
|
3523
|
|
|
return _extract(doc, "registered") |
3524
|
|
|
|
3525
|
1 |
|
def get_unixtime_registered(self): |
3526
|
|
|
"""Returns the user's registration date as a UNIX timestamp.""" |
3527
|
|
|
|
3528
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3529
|
|
|
|
3530
|
|
|
return doc.getElementsByTagName( |
3531
|
|
|
"registered")[0].getAttribute("unixtime") |
3532
|
|
|
|
3533
|
1 |
|
def get_tagged_albums(self, tag, limit=None, cacheable=True): |
3534
|
|
|
"""Returns the albums tagged by a user.""" |
3535
|
|
|
|
3536
|
|
|
params = self._get_params() |
3537
|
|
|
params['tag'] = tag |
3538
|
|
|
params['taggingtype'] = 'album' |
3539
|
|
|
if limit: |
3540
|
|
|
params['limit'] = limit |
3541
|
|
|
doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable, |
3542
|
|
|
params) |
3543
|
|
|
return _extract_albums(doc, self.network) |
3544
|
|
|
|
3545
|
1 |
|
def get_tagged_artists(self, tag, limit=None): |
3546
|
|
|
"""Returns the artists tagged by a user.""" |
3547
|
|
|
|
3548
|
|
|
params = self._get_params() |
3549
|
|
|
params['tag'] = tag |
3550
|
|
|
params['taggingtype'] = 'artist' |
3551
|
|
|
if limit: |
3552
|
|
|
params["limit"] = limit |
3553
|
|
|
doc = self._request(self.ws_prefix + '.getpersonaltags', True, params) |
3554
|
|
|
return _extract_artists(doc, self.network) |
3555
|
|
|
|
3556
|
1 |
|
def get_tagged_tracks(self, tag, limit=None, cacheable=True): |
3557
|
|
|
"""Returns the tracks tagged by a user.""" |
3558
|
|
|
|
3559
|
|
|
params = self._get_params() |
3560
|
|
|
params['tag'] = tag |
3561
|
|
|
params['taggingtype'] = 'track' |
3562
|
|
|
if limit: |
3563
|
|
|
params['limit'] = limit |
3564
|
|
|
doc = self._request(self.ws_prefix + '.getpersonaltags', cacheable, |
3565
|
|
|
params) |
3566
|
|
|
return _extract_tracks(doc, self.network) |
3567
|
|
|
|
3568
|
1 |
|
def get_top_albums( |
3569
|
|
|
self, period=PERIOD_OVERALL, limit=None, cacheable=True): |
3570
|
|
|
"""Returns the top albums played by a user. |
3571
|
|
|
* period: The period of time. Possible values: |
3572
|
|
|
o PERIOD_OVERALL |
3573
|
|
|
o PERIOD_7DAYS |
3574
|
|
|
o PERIOD_3MONTHS |
3575
|
|
|
o PERIOD_6MONTHS |
3576
|
|
|
o PERIOD_12MONTHS |
3577
|
|
|
""" |
3578
|
|
|
|
3579
|
|
|
params = self._get_params() |
3580
|
|
|
params['period'] = period |
3581
|
|
|
if limit: |
3582
|
|
|
params['limit'] = limit |
3583
|
|
|
|
3584
|
|
|
doc = self._request( |
3585
|
|
|
self.ws_prefix + '.getTopAlbums', cacheable, params) |
3586
|
|
|
|
3587
|
|
|
return _extract_top_albums(doc, self.network) |
3588
|
|
|
|
3589
|
1 |
|
def get_top_artists(self, period=PERIOD_OVERALL, limit=None): |
3590
|
|
|
"""Returns the top artists played by a user. |
3591
|
|
|
* period: The period of time. Possible values: |
3592
|
|
|
o PERIOD_OVERALL |
3593
|
|
|
o PERIOD_7DAYS |
3594
|
|
|
o PERIOD_3MONTHS |
3595
|
|
|
o PERIOD_6MONTHS |
3596
|
|
|
o PERIOD_12MONTHS |
3597
|
|
|
""" |
3598
|
|
|
|
3599
|
|
|
params = self._get_params() |
3600
|
|
|
params['period'] = period |
3601
|
|
|
if limit: |
3602
|
|
|
params["limit"] = limit |
3603
|
|
|
|
3604
|
|
|
doc = self._request(self.ws_prefix + '.getTopArtists', True, params) |
3605
|
|
|
|
3606
|
|
|
return _extract_top_artists(doc, self.network) |
3607
|
|
|
|
3608
|
1 |
|
def get_top_tags(self, limit=None, cacheable=True): |
3609
|
|
|
""" |
3610
|
|
|
Returns a sequence of the top tags used by this user with their counts |
3611
|
|
|
as TopItem objects. |
3612
|
|
|
* limit: The limit of how many tags to return. |
3613
|
|
|
* cacheable: Whether to cache results. |
3614
|
|
|
""" |
3615
|
|
|
|
3616
|
|
|
params = self._get_params() |
3617
|
|
|
if limit: |
3618
|
|
|
params["limit"] = limit |
3619
|
|
|
|
3620
|
|
|
doc = self._request(self.ws_prefix + ".getTopTags", cacheable, params) |
3621
|
|
|
|
3622
|
|
|
seq = [] |
3623
|
|
|
for node in doc.getElementsByTagName("tag"): |
3624
|
|
|
seq.append(TopItem( |
3625
|
|
|
Tag(_extract(node, "name"), self.network), |
3626
|
|
|
_extract(node, "count"))) |
3627
|
|
|
|
3628
|
|
|
return seq |
3629
|
|
|
|
3630
|
1 |
|
def get_top_tracks( |
3631
|
|
|
self, period=PERIOD_OVERALL, limit=None, cacheable=True): |
3632
|
|
|
"""Returns the top tracks played by a user. |
3633
|
|
|
* period: The period of time. Possible values: |
3634
|
|
|
o PERIOD_OVERALL |
3635
|
|
|
o PERIOD_7DAYS |
3636
|
|
|
o PERIOD_3MONTHS |
3637
|
|
|
o PERIOD_6MONTHS |
3638
|
|
|
o PERIOD_12MONTHS |
3639
|
|
|
""" |
3640
|
|
|
|
3641
|
|
|
params = self._get_params() |
3642
|
|
|
params['period'] = period |
3643
|
|
|
if limit: |
3644
|
|
|
params['limit'] = limit |
3645
|
|
|
|
3646
|
|
|
return self._get_things( |
3647
|
|
|
"getTopTracks", "track", Track, params, cacheable) |
3648
|
|
|
|
3649
|
1 |
|
def compare_with_user(self, user, shared_artists_limit=None): |
3650
|
|
|
""" |
3651
|
|
|
Compare this user with another Last.fm user. |
3652
|
|
|
Returns a sequence: |
3653
|
|
|
(tasteometer_score, (shared_artist1, shared_artist2, ...)) |
3654
|
|
|
user: A User object or a username string/unicode object. |
3655
|
|
|
""" |
3656
|
|
|
|
3657
|
|
|
if isinstance(user, User): |
3658
|
|
|
user = user.get_name() |
3659
|
|
|
|
3660
|
|
|
params = self._get_params() |
3661
|
|
|
if shared_artists_limit: |
3662
|
|
|
params['limit'] = shared_artists_limit |
3663
|
|
|
params['type1'] = 'user' |
3664
|
|
|
params['type2'] = 'user' |
3665
|
|
|
params['value1'] = self.get_name() |
3666
|
|
|
params['value2'] = user |
3667
|
|
|
|
3668
|
|
|
doc = self._request('tasteometer.compare', False, params) |
3669
|
|
|
|
3670
|
|
|
score = _extract(doc, 'score') |
3671
|
|
|
|
3672
|
|
|
artists = doc.getElementsByTagName('artists')[0] |
3673
|
|
|
shared_artists_names = _extract_all(artists, 'name') |
3674
|
|
|
|
3675
|
|
|
shared_artists_seq = [] |
3676
|
|
|
|
3677
|
|
|
for name in shared_artists_names: |
3678
|
|
|
shared_artists_seq.append(Artist(name, self.network)) |
3679
|
|
|
|
3680
|
|
|
return (score, shared_artists_seq) |
3681
|
|
|
|
3682
|
1 |
|
def get_image(self): |
3683
|
|
|
"""Returns the user's avatar.""" |
3684
|
|
|
|
3685
|
|
|
doc = self._request(self.ws_prefix + ".getInfo", True) |
3686
|
|
|
|
3687
|
|
|
return _extract(doc, "image") |
3688
|
|
|
|
3689
|
1 |
|
def get_url(self, domain_name=DOMAIN_ENGLISH): |
3690
|
|
|
"""Returns the url of the user page on the network. |
3691
|
|
|
* domain_name: The network's language domain. Possible values: |
3692
|
|
|
o DOMAIN_ENGLISH |
3693
|
|
|
o DOMAIN_GERMAN |
3694
|
|
|
o DOMAIN_SPANISH |
3695
|
|
|
o DOMAIN_FRENCH |
3696
|
|
|
o DOMAIN_ITALIAN |
3697
|
|
|
o DOMAIN_POLISH |
3698
|
|
|
o DOMAIN_PORTUGUESE |
3699
|
|
|
o DOMAIN_SWEDISH |
3700
|
|
|
o DOMAIN_TURKISH |
3701
|
|
|
o DOMAIN_RUSSIAN |
3702
|
|
|
o DOMAIN_JAPANESE |
3703
|
|
|
o DOMAIN_CHINESE |
3704
|
|
|
""" |
3705
|
|
|
|
3706
|
|
|
name = _url_safe(self.get_name()) |
3707
|
|
|
|
3708
|
|
|
return self.network._get_url(domain_name, "user") % {'name': name} |
3709
|
|
|
|
3710
|
1 |
|
def get_library(self): |
3711
|
|
|
"""Returns the associated Library object. """ |
3712
|
|
|
|
3713
|
|
|
return Library(self, self.network) |
3714
|
|
|
|
3715
|
1 |
|
def shout(self, message): |
3716
|
|
|
""" |
3717
|
|
|
Post a shout |
3718
|
|
|
""" |
3719
|
|
|
|
3720
|
|
|
params = self._get_params() |
3721
|
|
|
params["message"] = message |
3722
|
|
|
|
3723
|
|
|
self._request(self.ws_prefix + ".Shout", False, params) |
3724
|
|
|
|
3725
|
|
|
|
3726
|
1 |
|
class AuthenticatedUser(User):
    """The user whose session key authenticates the current network."""

    def __init__(self, network):
        # The username is resolved lazily by get_name().
        User.__init__(self, "", network)

    def _get_params(self):
        return {"user": self.get_name()}

    def get_name(self):
        """Returns the name of the authenticated user."""

        doc = self._request("user.getInfo", True, {"user": ""})  # hack

        self.name = _extract(doc, "name")
        return self.name

    def get_recommended_events(self, limit=50, cacheable=False):
        """
        Returns a sequence of Event objects
        if limit==None it will return all
        """
        return [
            Event(_extract(node, "id"), self.network)
            for node in _collect_nodes(
                limit, self, "user.getRecommendedEvents", cacheable)]

    def get_recommended_artists(self, limit=50, cacheable=False):
        """
        Returns a sequence of Artist objects
        if limit==None it will return all
        """
        return [
            Artist(_extract(node, "name"), self.network)
            for node in _collect_nodes(
                limit, self, "user.getRecommendedArtists", cacheable)]
3766
|
|
|
|
3767
|
|
|
|
3768
|
1 |
|
class _Search(_BaseObject):
    """An abstract class. Use one of its derivatives."""

    def __init__(self, ws_prefix, search_terms, network):
        _BaseObject.__init__(self, network, ws_prefix)

        self._ws_prefix = ws_prefix
        self.search_terms = search_terms

        # Index of the last page fetched; 0 means nothing fetched yet.
        self._last_page_index = 0

    def _get_params(self):
        # Shallow copy so callers can add request-specific keys safely.
        return dict(self.search_terms)

    def get_total_result_count(self):
        """Returns the total count of all the results."""
        doc = self._request(self._ws_prefix + ".search", True)
        return _extract(doc, "opensearch:totalResults")

    def _retrieve_page(self, page_index):
        """Returns the node of matches to be processed"""

        params = self._get_params()
        params["page"] = str(page_index)
        doc = self._request(self._ws_prefix + ".search", True, params)

        return doc.getElementsByTagName(self._ws_prefix + "matches")[0]

    def _retrieve_next_page(self):
        self._last_page_index += 1
        return self._retrieve_page(self._last_page_index)
3806
|
|
|
|
3807
|
|
|
|
3808
|
1 |
|
class AlbumSearch(_Search):
    """Search for an album by name."""

    def __init__(self, album_name, network):
        _Search.__init__(self, "album", {"album": album_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Album objects."""

        matches = self._retrieve_next_page()

        return [
            Album(
                _extract(node, "artist"),
                _extract(node, "name"),
                self.network)
            for node in matches.getElementsByTagName("album")]
3828
|
|
|
|
3829
|
|
|
|
3830
|
1 |
|
class ArtistSearch(_Search):
    """Search for an artist by artist name."""

    def __init__(self, artist_name, network):
        _Search.__init__(self, "artist", {"artist": artist_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Artist objects."""

        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("artist"):
            found = Artist(_extract(node, "name"), self.network)
            # Attach the listener count reported alongside the match.
            found.listener_count = _number(_extract(node, "listeners"))
            results.append(found)

        return results
3848
|
|
|
|
3849
|
|
|
|
3850
|
1 |
|
class TagSearch(_Search):
    """Search for a tag by tag name."""

    def __init__(self, tag_name, network):
        _Search.__init__(self, "tag", {"tag": tag_name}, network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Tag objects."""

        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("tag"):
            found = Tag(_extract(node, "name"), self.network)
            # Attach the usage count reported alongside the match.
            found.tag_count = _number(_extract(node, "count"))
            results.append(found)

        return results
3869
|
|
|
|
3870
|
|
|
|
3871
|
1 |
|
class TrackSearch(_Search):
    """
    Search for a track by track title. If you don't want to narrow the results
    down by specifying the artist name, set it to empty string.
    """

    def __init__(self, artist_name, track_title, network):
        _Search.__init__(
            self,
            "track",
            {"track": track_title, "artist": artist_name},
            network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Track objects."""

        matches = self._retrieve_next_page()

        results = []
        for node in matches.getElementsByTagName("track"):
            found = Track(
                _extract(node, "artist"),
                _extract(node, "name"),
                self.network)
            # Attach the listener count reported alongside the match.
            found.listener_count = _number(_extract(node, "listeners"))
            results.append(found)

        return results
3900
|
|
|
|
3901
|
|
|
|
3902
|
1 |
|
class VenueSearch(_Search):
    """
    Search for a venue by its name. If you don't want to narrow the results
    down by specifying a country, set it to empty string.
    """

    def __init__(self, venue_name, country_name, network):
        _Search.__init__(
            self,
            "venue",
            {"venue": venue_name, "country": country_name},
            network)

    def get_next_page(self):
        """Returns the next page of results as a sequence of Venue objects."""

        matches = self._retrieve_next_page()

        return [
            Venue(_extract(node, "id"), self.network)
            for node in matches.getElementsByTagName("venue")]
3926
|
|
|
|
3927
|
|
|
|
3928
|
1 |
|
class Venue(_BaseObject):
    """A venue where events are held."""

    # TODO: waiting for a venue.getInfo web service to use.
    # TODO: As an intermediate use case, can pass the venue DOM element when
    # using Event.get_venue() to populate the venue info, if the venue.getInfo
    # API call becomes available this workaround should be removed

    id = None
    info = None
    name = None
    location = None
    url = None

    __hash__ = _BaseObject.__hash__

    def __init__(self, netword_id, network, venue_element=None):
        _BaseObject.__init__(self, network, "venue")

        self.id = _number(netword_id)
        if venue_element is None:
            return
        # Workaround (see TODO above): populate details from a DOM element.
        self.info = _extract_element_tree(venue_element)
        self.name = self.info.get('name')
        self.url = self.info.get('url')
        self.location = self.info.get('location')

    def __repr__(self):
        return "pylast.Venue(%s, %s)" % (repr(self.id), repr(self.network))

    @_string_output
    def __str__(self):
        return "Venue #" + str(self.id)

    def __eq__(self, other):
        return self.get_id() == other.get_id()

    def _get_params(self):
        return {self.ws_prefix: self.get_id()}

    def get_id(self):
        """Returns the id of the venue."""
        return self.id

    def get_name(self):
        """Returns the name of the venue."""
        return self.name

    def get_url(self):
        """Returns the URL of the venue page."""
        return self.url

    def get_location(self):
        """Returns the location of the venue (dictionary)."""
        return self.location

    def get_upcoming_events(self):
        """Returns the upcoming events in this venue."""
        doc = self._request(self.ws_prefix + ".getEvents", True)
        return _extract_events_from_doc(doc, self.network)

    def get_past_events(self):
        """Returns the past events held in this venue."""
        # NOTE(review): this issues the same venue.getEvents request as
        # get_upcoming_events(); the service call does not distinguish
        # past from upcoming here - confirm against the web API.
        doc = self._request(self.ws_prefix + ".getEvents", True)
        return _extract_events_from_doc(doc, self.network)
4000
|
|
|
|
4001
|
|
|
|
4002
|
1 |
|
def md5(text):
    """Returns the md5 hash of a string."""
    digest = hashlib.md5(_unicode(text).encode("utf-8"))
    return digest.hexdigest()
4009
|
|
|
|
4010
|
|
|
|
4011
|
1 |
|
def _unicode(text):
    """Coerce *text* to a unicode string, decoding bytes as UTF-8."""
    if isinstance(text, six.text_type):
        return text
    if isinstance(text, six.binary_type):
        return six.text_type(text, "utf-8")
    return six.text_type(text)
4018
|
|
|
|
4019
|
|
|
|
4020
|
1 |
|
def _string(string):
    """For Python2 routines that can only process str type."""
    if isinstance(string, str):
        return string
    converted = six.text_type(string)
    # On Python 2, str means bytes, so encode the unicode result.
    if sys.version_info[0] == 2:
        converted = converted.encode("utf-8")
    return converted
4028
|
|
|
|
4029
|
|
|
|
4030
|
1 |
|
def cleanup_nodes(doc):
    """
    Remove text nodes containing only whitespace.

    Iterates over a *copy* of the child list: removing entries from the
    live ``childNodes`` sequence while iterating it skips the sibling
    that follows each removed node, so consecutive whitespace text nodes
    could be left behind.
    """
    for node in list(doc.documentElement.childNodes):
        if node.nodeType == Node.TEXT_NODE and node.nodeValue.isspace():
            doc.documentElement.removeChild(node)
    return doc
4038
|
|
|
|
4039
|
|
|
|
4040
|
1 |
|
def _collect_nodes(limit, sender, method_name, cacheable, params=None):
    """
    Returns a sequence of dom.Node objects about as close to limit as possible
    """

    if not params:
        params = sender._get_params()

    nodes = []
    page = 1
    end_of_pages = False

    # A falsy limit (None or 0) means "collect everything".
    while not end_of_pages and (not limit or len(nodes) < limit):
        params["page"] = str(page)
        doc = cleanup_nodes(sender._request(method_name, cacheable, params))

        main = doc.documentElement.childNodes[0]

        if main.hasAttribute("totalPages"):
            total_pages = _number(main.getAttribute("totalPages"))
        elif main.hasAttribute("totalpages"):
            total_pages = _number(main.getAttribute("totalpages"))
        else:
            raise Exception("No total pages attribute")

        for child in main.childNodes:
            # Skip whitespace text nodes and stop appending once full.
            if child.nodeType != xml.dom.Node.TEXT_NODE and (
                    not limit or len(nodes) < limit):
                nodes.append(child)

        if page >= total_pages:
            end_of_pages = True

        page += 1

    return nodes
4077
|
|
|
|
4078
|
|
|
|
4079
|
1 |
|
def _extract(node, name, index=0):
    """Extracts a value from the xml string"""

    found = node.getElementsByTagName(name)
    if not found:
        return None

    child = found[index].firstChild
    # Elements with no text content yield None.
    return _unescape_htmlentity(child.data.strip()) if child else None
4089
|
|
|
|
4090
|
|
|
|
4091
|
1 |
|
def _extract_element_tree(node):
    """Extract an element tree into a multi-level dictionary

    NB: If any elements have text nodes as well as nested
    elements this will ignore the text nodes"""

    def _recurse_build_tree(rootNode, targetDict):
        """Recursively build a multi-level dict"""

        def _has_child_elements(rootNode):
            """Check if an element has any nested (child) elements"""

            for node in rootNode.childNodes:
                if node.nodeType == node.ELEMENT_NODE:
                    return True
            return False

        # Walk only element children; text/comment nodes at this level
        # are ignored by design (see the NB above).
        for node in rootNode.childNodes:
            if node.nodeType == node.ELEMENT_NODE:
                if _has_child_elements(node):
                    # Branch node: recurse into a fresh nested dict.
                    targetDict[node.tagName] = {}
                    _recurse_build_tree(node, targetDict[node.tagName])
                else:
                    # Leaf node: store its (unescaped) text, or None if
                    # the element is empty.
                    val = None if node.firstChild is None else \
                        _unescape_htmlentity(node.firstChild.data.strip())
                    targetDict[node.tagName] = val
        return targetDict

    return _recurse_build_tree(node, {})
4120
|
|
|
|
4121
|
|
|
|
4122
|
1 |
|
def _extract_all(node, name, limit_count=None):
    """Extracts all the values from the xml string. returning a list."""

    results = []
    for index in range(len(node.getElementsByTagName(name))):
        # Stop once exactly limit_count values have been collected.
        if len(results) == limit_count:
            break
        results.append(_extract(node, name, index))

    return results
4134
|
|
|
|
4135
|
|
|
|
4136
|
1 |
|
def _extract_top_artists(doc, network):
    # TODO Maybe include the _request here too?
    return [
        TopItem(
            Artist(_extract(node, "name"), network),
            _extract(node, "playcount"))
        for node in doc.getElementsByTagName("artist")]
4146
|
|
|
|
4147
|
|
|
|
4148
|
1 |
|
def _extract_top_albums(doc, network):
    # TODO Maybe include the _request here too?
    # The second <name> element inside each <album> is the artist name.
    return [
        TopItem(
            Album(
                _extract(node, "name", 1),
                _extract(node, "name"),
                network),
            _extract(node, "playcount"))
        for node in doc.getElementsByTagName("album")]
4159
|
|
|
|
4160
|
|
|
|
4161
|
1 |
|
def _extract_artists(doc, network):
    return [
        Artist(_extract(node, "name"), network)
        for node in doc.getElementsByTagName("artist")]
4166
|
|
|
|
4167
|
|
|
|
4168
|
1 |
|
def _extract_albums(doc, network):
    # The second <name> element inside each <album> is the artist name.
    return [
        Album(_extract(node, "name", 1), _extract(node, "name"), network)
        for node in doc.getElementsByTagName("album")]
4175
|
|
|
|
4176
|
|
|
|
4177
|
1 |
|
def _extract_tracks(doc, network):
    # The second <name> element inside each <track> is the artist name.
    return [
        Track(_extract(node, "name", 1), _extract(node, "name"), network)
        for node in doc.getElementsByTagName("track")]
4184
|
|
|
|
4185
|
|
|
|
4186
|
1 |
|
def _extract_events_from_doc(doc, network):
    return [
        Event(_extract(node, "id"), network)
        for node in doc.getElementsByTagName("event")]
4191
|
|
|
|
4192
|
|
|
|
4193
|
1 |
|
def _url_safe(text):
    """Does all kinds of tricks on a text to make it safe to use in a url."""
    # Quote twice, as the Last.fm site expects, then lowercase.
    quoted_once = url_quote_plus(_string(text))
    return url_quote_plus(quoted_once).lower()
4197
|
|
|
|
4198
|
|
|
|
4199
|
1 |
|
def _number(string): |
4200
|
|
|
""" |
4201
|
|
|
Extracts an int from a string. |
4202
|
|
|
Returns a 0 if None or an empty string was passed. |
4203
|
|
|
""" |
4204
|
|
|
|
4205
|
|
|
if not string: |
4206
|
|
|
return 0 |
4207
|
|
|
elif string == "": |
4208
|
|
|
return 0 |
4209
|
|
|
else: |
4210
|
|
|
try: |
4211
|
|
|
return int(string) |
4212
|
|
|
except ValueError: |
4213
|
|
|
return float(string) |
4214
|
|
|
|
4215
|
|
|
|
4216
|
1 |
|
def _unescape_htmlentity(string):
    """
    Replace named HTML entities (e.g. "&amp;", "&eacute;") in *string*
    with their unicode characters.

    Uses a single regex pass instead of one str.replace() per known
    entity name (~250 of them), which also prevents accidental
    double-decoding (e.g. "&amp;lt;" now correctly yields "&lt;").
    Unknown entity names are left untouched.
    """
    mapping = htmlentitydefs.name2codepoint

    def _replace(match):
        entity_name = match.group(1)
        if entity_name in mapping:
            return unichr(mapping[entity_name])
        # Not a known entity: keep the original text.
        return match.group(0)

    return re.sub(r"&(\w+);", _replace, string)
4225
|
|
|
|
4226
|
|
|
|
4227
|
1 |
|
def extract_items(topitems_or_libraryitems):
    """
    Extracts a sequence of items from a sequence of TopItem or
    LibraryItem objects.
    """
    return [entry.item for entry in topitems_or_libraryitems]
4238
|
|
|
|
4239
|
|
|
|
4240
|
1 |
|
class ScrobblingError(Exception):
    """Base class for errors reported by the scrobbling protocol."""

    def __init__(self, message):
        Exception.__init__(self)
        # Human-readable reason string returned by the scrobbling server.
        self.message = message

    @_string_output
    def __str__(self):
        return self.message
4248
|
|
|
|
4249
|
|
|
|
4250
|
1 |
|
class BannedClientError(ScrobblingError):
    """Raised when the server reports BANNED for this client version."""

    def __init__(self):
        super(BannedClientError, self).__init__(
            "This version of the client has been banned")
4254
|
|
|
|
4255
|
|
|
|
4256
|
1 |
|
class BadAuthenticationError(ScrobblingError):
    """Raised when the server reports BADAUTH."""

    def __init__(self):
        super(BadAuthenticationError, self).__init__(
            "Bad authentication token")
4259
|
|
|
|
4260
|
|
|
|
4261
|
1 |
|
class BadTimeError(ScrobblingError):
    """Raised when the server reports BADTIME (clock skew too large)."""

    def __init__(self):
        super(BadTimeError, self).__init__(
            "Time provided is not close enough to current time")
4265
|
|
|
|
4266
|
|
|
|
4267
|
1 |
|
class BadSessionError(ScrobblingError):
    """Raised when the server reports BADSESSION."""

    def __init__(self):
        super(BadSessionError, self).__init__(
            "Bad session id, consider re-handshaking")
4271
|
|
|
|
4272
|
|
|
|
4273
|
1 |
|
class _ScrobblerRequest(object):
    """A single HTTP request in the legacy scrobbling (submissions) protocol.

    Builds a form-encoded GET or POST request and maps the plain-text
    status line of the response onto the Scrobbling* exception types.
    """

    def __init__(self, url, params, network, request_type="POST"):

        # The wire protocol is plain text, so stringify every value.
        for key in params:
            params[key] = str(params[key])

        self.params = params
        self.type = request_type
        # Strip the "http:" scheme prefix; url_split_host expects "//host/path".
        (self.hostname, self.subdir) = url_split_host(url[len("http:"):])
        self.network = network

    def execute(self):
        """Returns a string response of this request."""

        connection = HTTPConnection(self.hostname)

        # Form-encode the parameters as key=value pairs joined with '&'.
        data = []
        for name in self.params.keys():
            value = url_quote_plus(self.params[name])
            data.append('='.join((name, value)))
        data = "&".join(data)

        headers = {
            "Content-type": "application/x-www-form-urlencoded",
            "Accept-Charset": "utf-8",
            "User-Agent": "pylast" + "/" + __version__,
            "HOST": self.hostname
        }

        if self.type == "GET":
            # GET carries the payload in the query string instead of the body.
            connection.request(
                "GET", self.subdir + "?" + data, headers=headers)
        else:
            connection.request("POST", self.subdir, data, headers)
        response = _unicode(connection.getresponse().read())

        # Raises a Scrobbling* exception if the status line is not "OK".
        self._check_response_for_errors(response)

        return response

    def _check_response_for_errors(self, response):
        """
        When passed a string response it checks for errors, raising any
        exceptions as necessary.
        """

        # The protocol puts the status on the first line of the body.
        lines = response.split("\n")
        status_line = lines[0]

        if status_line == "OK":
            return
        elif status_line == "BANNED":
            raise BannedClientError()
        elif status_line == "BADAUTH":
            raise BadAuthenticationError()
        elif status_line == "BADTIME":
            raise BadTimeError()
        elif status_line == "BADSESSION":
            raise BadSessionError()
        elif status_line.startswith("FAILED "):
            # "FAILED <reason>": pass the server-supplied reason through.
            reason = status_line[status_line.find("FAILED ") + len("FAILED "):]
            raise ScrobblingError(reason)
4336
|
|
|
|
4337
|
|
|
|
4338
|
1 |
|
class Scrobbler(object):
    """A class for scrobbling tracks to Last.fm.

    Implements the legacy (v1.2) audioscrobbler submissions protocol.
    The public methods here are deprecated in favour of the
    Network.scrobble(...) / Network.update_now_playing(...) family.
    """

    # Populated by _do_handshake(); None until the first handshake.
    session_id = None
    nowplaying_url = None
    submissions_url = None

    def __init__(self, network, client_id, client_version):
        """
        network: the network object to scrobble to.
        client_id: the client identifier assigned by Last.fm.
        client_version: the version string of the scrobbling client.
        """
        self.client_id = client_id
        self.client_version = client_version
        self.username = network.username
        self.password = network.password_hash
        self.network = network

    def _do_handshake(self):
        """Handshakes with the server.

        On success stores the session id, the now-playing URL and the
        submissions URL returned by the server.
        Raises ScrobblingError if no usable credentials are configured.
        """

        timestamp = str(int(time.time()))

        if self.password and self.username:
            token = md5(self.password + timestamp)
        elif self.network.api_key and self.network.api_secret and \
                self.network.session_key:
            if not self.username:
                self.username = self.network.get_authenticated_user()\
                    .get_name()
            token = md5(self.network.api_secret + timestamp)
        else:
            # Bug fix: this path previously fell through and raised a
            # confusing NameError on "token" below.
            raise ScrobblingError(
                "Cannot handshake: a username/password_hash or an "
                "api_key, api_secret and session_key are required")

        params = {
            "hs": "true", "p": "1.2.1", "c": self.client_id,
            "v": self.client_version, "u": self.username, "t": timestamp,
            "a": token}

        if self.network.session_key and self.network.api_key:
            params["sk"] = self.network.session_key
            params["api_key"] = self.network.api_key

        server = self.network.submission_server
        response = _ScrobblerRequest(
            server, params, self.network, "GET").execute().split("\n")

        # Response lines: status, session id, now-playing URL,
        # submissions URL.
        self.session_id = response[1]
        self.nowplaying_url = response[2]
        self.submissions_url = response[3]

    def _get_session_id(self, new=False):
        """
        Returns a handshake. If new is true, then it will be requested from
        the server even if one was cached.
        """

        if not self.session_id or new:
            self._do_handshake()

        return self.session_id

    def report_now_playing(
            self, artist, title, album="", duration="", track_number="",
            mbid=""):
        """Report the given track as now playing (deprecated)."""

        _deprecation_warning(
            "DeprecationWarning: Use Network.update_now_playing(...) instead")

        params = {
            "s": self._get_session_id(), "a": artist, "t": title,
            "b": album, "l": duration, "n": track_number, "m": mbid}

        try:
            _ScrobblerRequest(
                self.nowplaying_url, params, self.network
            ).execute()
        except BadSessionError:
            # The cached session expired; re-handshake and retry once.
            self._do_handshake()
            self.report_now_playing(
                artist, title, album, duration, track_number, mbid)

    def scrobble(
            self, artist, title, time_started, source, mode, duration,
            album="", track_number="", mbid=""):
        """Scrobble a track. parameters:
            artist: Artist name.
            title: Track title.
            time_started: UTC timestamp of when the track started playing.
            source: The source of the track
                SCROBBLE_SOURCE_USER: Chosen by the user
                    (the most common value, unless you have a reason for
                    choosing otherwise, use this).
                SCROBBLE_SOURCE_NON_PERSONALIZED_BROADCAST: Non-personalised
                    broadcast (e.g. Shoutcast, BBC Radio 1).
                SCROBBLE_SOURCE_PERSONALIZED_BROADCAST: Personalised
                    recommendation except Last.fm (e.g. Pandora, Launchcast).
                SCROBBLE_SOURCE_LASTFM: Last.fm (any mode). In this case, the
                    5-digit recommendation_key value must be set.
                SCROBBLE_SOURCE_UNKNOWN: Source unknown.
            mode: The submission mode
                SCROBBLE_MODE_PLAYED: The track was played.
                SCROBBLE_MODE_LOVED: The user manually loved the track
                    (implies a listen)
                SCROBBLE_MODE_SKIPPED: The track was skipped
                    (Only if source was Last.fm)
                SCROBBLE_MODE_BANNED: The track was banned
                    (Only if source was Last.fm)
            duration: Track duration in seconds.
            album: The album name.
            track_number: The track number on the album.
            mbid: MusicBrainz ID.
        """

        _deprecation_warning(
            "DeprecationWarning: Use Network.scrobble(...) instead")

        params = {
            "s": self._get_session_id(),
            "a[0]": _string(artist),
            "t[0]": _string(title),
            "i[0]": str(time_started),
            "o[0]": source,
            "r[0]": mode,
            "l[0]": str(duration),
            "b[0]": _string(album),
            "n[0]": track_number,
            "m[0]": mbid
        }

        _ScrobblerRequest(self.submissions_url, params, self.network).execute()

    def scrobble_many(self, tracks):
        """
        Scrobble several tracks at once.

        tracks: A sequence of a sequence of parameters for each track.
            The order of parameters is the same as if passed to the
            scrobble() method.
        """

        _deprecation_warning(
            "DeprecationWarning: Use Network.scrobble_many(...) instead")

        # The protocol caps a submission at 50 tracks; recurse for the rest.
        remainder = []
        if len(tracks) > 50:
            remainder = tracks[50:]
            tracks = tracks[:50]

        params = {"s": self._get_session_id()}

        for i, track in enumerate(tracks):
            _pad_list(track, 9, "")
            params["a[%d]" % i] = _string(track[0])
            params["t[%d]" % i] = _string(track[1])
            params["i[%d]" % i] = str(track[2])
            params["o[%d]" % i] = track[3]
            params["r[%d]" % i] = track[4]
            params["l[%d]" % i] = str(track[5])
            params["b[%d]" % i] = _string(track[6])
            params["n[%d]" % i] = track[7]
            params["m[%d]" % i] = track[8]

        _ScrobblerRequest(self.submissions_url, params, self.network).execute()

        if remainder:
            self.scrobble_many(remainder)
4503
|
|
|
|
4504
|
|
|
# End of file |
4505
|
|
|
|
Duplicated code is one of the most pungent code smells. If the same code appears in three or more different places, we strongly encourage you to extract it into a single shared class or operation.
You can also find more detailed suggestions in the “Code” section of your repository.