<?php
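// NOTE: The namespace declaration and use statements were elided from this
// excerpt. The sketched method bodies below assume RobotsTxtInterface,
// UrlParser, DirectiveParserCommons, SubDirectiveHandler and ClientException
// are imported from the surrounding project.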

class UserAgentTools implements RobotsTxtInterface
{
    use UrlParser;
    use DirectiveParserCommons;

    /**
     * Rules
     * @var SubDirectiveHandler
     */
    protected $handler;

    /**
     * Base URI
     * @var string
     */
    private $base;

    /**
     * Status code
     * @var int|null
     */
    private $statusCode;

    /**
     * UserAgentTools constructor.
     *
     * @param SubDirectiveHandler $handler
     * @param string $base
     * @param int|null $statusCode
     */
    public function __construct(SubDirectiveHandler $handler, $base, $statusCode)
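    {
        // Body elided in the excerpt. A minimal sketch, assuming the
        // constructor simply stores its injected collaborators, consistent
        // with the property declarations above.
        $this->handler = $handler;
        $this->base = $base;
        $this->statusCode = $statusCode;
    }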

    /**
     * UserAgentTools destructor.
     */
    public function __destruct()
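    {
        // Body elided in the excerpt; left empty here, as the original
        // cleanup logic is not recoverable from this fragment.
    }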

    /**
     * Check if URL is allowed to crawl
     *
     * @param string $url
     * @return bool
     */
    public function isAllowed($url)
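    {
        // Body elided in the excerpt. A plausible sketch, assuming the
        // allow/disallow checks share the private check() helper below and
        // that DIRECTIVE_ALLOW is a constant on RobotsTxtInterface.
        return $this->check(self::DIRECTIVE_ALLOW, $url);
    }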

    /**
     * Check the URL against the rule set of the given directive
     *
     * @param string $directive
     * @param string $url
     * @return bool
     * @throws ClientException
     */
    private function check($directive, $url)
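    {
        // Body elided in the excerpt (original spans roughly 27 lines).
        // A hedged outline only: the SubDirectiveHandler rule-matching API
        // is not visible in this fragment, so the calls below are assumptions.
        if (!$this->isUrlApplicable([$url, $this->base])) {
            throw new ClientException('URL belongs to a different robots.txt: ' . $url);
        }
        // A 5xx status code is commonly treated as "temporarily disallow all".
        if ($this->statusCode !== null && $this->statusCode >= 500 && $this->statusCode < 600) {
            return $directive === self::DIRECTIVE_DISALLOW;
        }
        // ... match $url against the allow/disallow rules in $this->handler ...
        return $directive === self::DIRECTIVE_ALLOW;
    }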

    /**
     * Check if the URL belongs to the current robots.txt
     *
     * @param string[] $urls
     * @return bool
     */
    private function isUrlApplicable($urls)
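    {
        // Body elided in the excerpt. A sketch comparing scheme, host and
        // port: PHP's parse_url() stands in for whatever the UrlParser trait
        // actually provides here.
        $origins = [];
        foreach ($urls as $url) {
            $parsed = parse_url($url);
            $scheme = isset($parsed['scheme']) ? $parsed['scheme'] : '';
            $host = isset($parsed['host']) ? $parsed['host'] : '';
            $port = isset($parsed['port']) ? ':' . $parsed['port'] : '';
            $origins[] = $scheme . '://' . $host . $port;
        }
        // Applicable only if every URL shares the same origin.
        return count(array_unique($origins)) === 1;
    }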

    /**
     * Check if URL is disallowed to crawl
     *
     * @param string $url
     * @return bool
     */
    public function isDisallowed($url)
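    {
        // Body elided in the excerpt; sketched as the natural counterpart of
        // isAllowed() above (assumption, mirroring the allow case).
        return $this->check(self::DIRECTIVE_DISALLOW, $url);
    }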

    /**
     * Rule export
     *
     * @return array
     */
    public function export()
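    {
        // Body elided in the excerpt. A hedged placeholder: the export
        // format and the SubDirectiveHandler accessors are not shown in
        // this fragment.
        $result = [];
        // ... collect the rule arrays from $this->handler into $result ...
        return $result;
    }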
}