<?php

/*
 * (c) Christian Gripp <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Core23\LastFm\Crawler;

use Core23\LastFm\Model\Event;
use Core23\LastFm\Model\GeoLocation;
use DateTime;
use Symfony\Component\DomCrawler\Crawler;
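
/**
 * Crawls the public last.fm event listing pages for a geographic location.
 */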
final class EventListCrawler extends AbstractCrawler implements EventListCrawlerInterface
{
    private const BASE_URL = 'https://www.last.fm/events';

    /**
     * {@inheritdoc}
     */
    public function getEvents(GeoLocation $location, $radius = 100, int $page = 1): array
    {
        $node = $this->crawlUrl($location, $radius, $page);

        if (null === $node) {
            return [];
        }

        $resultList = [];
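
        // The listing groups events into sections by date: each section's
        // .group-heading text is parsed as the date shared by every event in it.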
        $node->filter('.page-content section')->each(function (Crawler $node) use (&$resultList) {
            $headingNode = $node->filter('.group-heading');

            $datetime = new DateTime(trim($headingNode->text()));

            $resultList = array_merge($resultList, $this->crawlEventListGroup($node, $datetime));
        });

        return $resultList;
    }

    /**
     * {@inheritdoc}
     */
    public function getPages(GeoLocation $location, $radius = 100): int
    {
        $node = $this->crawlUrl($location, $radius);

        if (null === $node) {
            return 0;
        }

        $lastNode = $node->filter('.pagination .pagination-page')->last();
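
        // The last pagination link holds the total page count. Crawler::text()
        // throws if no .pagination-page element exists, so this assumes the
        // listing always renders a pagination bar.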

        return (int) $lastNode->text();
    }

    /**
     * @param Crawler  $node
     * @param DateTime $datetime
     *
     * @return Event[]
     */
    private function crawlEventListGroup(Crawler $node, DateTime $datetime): array
    {
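        // Crawler::each() returns the values produced by the callback,
        // so this yields one Event per .events-list-item node.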
        return $node->filter('.events-list-item')->each(
            function (Crawler $node) use ($datetime): Event {
                return $this->parseEvent($node, $datetime);
            }
        );
    }

    /**
     * @param GeoLocation $location
     * @param int         $radius
     * @param int         $page
     *
     * @return Crawler|null
     */
    private function crawlUrl(GeoLocation $location, int $radius, int $page = 1): ?Crawler
    {
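        // Build the query string by hand: the coordinates come from the given
        // GeoLocation, location_0 is a fixed label, and the radius is scaled
        // by 1000 (presumably a kilometres-to-metres conversion).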
        $url = static::BASE_URL;
        $url .= '?location_0=Germany';
        $url .= '&location_1='.$location->getLongitude();
        $url .= '&location_2='.$location->getLatitude();
        $url .= '&radius='.($radius * 1000);
        $url .= '&page='.$page;

        return $this->crawl($url);
    }
}