Complex classes like Crawler often do a lot of different things. To break such a class down, we need to identify a cohesive component within the class. A common approach to finding such a component is to look for fields and methods that share the same prefixes or suffixes. You can also look at the cohesion graph to spot any unconnected or weakly connected components. In Crawler, for instance, the maximum* fields form such a prefix group; a sketch of extracting them follows the class listing below.
Once you have determined which fields belong together, you can apply the Extract Class refactoring. If the component makes sense as a subclass, Extract Subclass is also a candidate, and is often faster.
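For example, the $executeJavaScript and $browsershot fields in the class listing below, together with executeJavaScript(), doNotExecuteJavaScript(), mayExecuteJavascript(), setBrowsershot() and getBrowsershot(), form such a group. A minimal sketch of extracting them into their own class could look like the following; the class name JavaScriptExecution and its method names are assumptions chosen for illustration, not part of the package:

```php
<?php

use Spatie\Browsershot\Browsershot;

// Hypothetical component extracted from Crawler that owns the JavaScript-related state.
class JavaScriptExecution
{
    /** @var bool */
    protected $enabled = false;

    /** @var Browsershot|null */
    protected $browsershot = null;

    public function enable(): self
    {
        $this->enabled = true;

        return $this;
    }

    public function disable(): self
    {
        $this->enabled = false;

        return $this;
    }

    public function isEnabled(): bool
    {
        return $this->enabled;
    }

    public function setBrowsershot(Browsershot $browsershot): self
    {
        $this->browsershot = $browsershot;

        return $this;
    }

    public function getBrowsershot(): ?Browsershot
    {
        return $this->browsershot;
    }
}
```

Crawler would then hold a single JavaScriptExecution instance and delegate executeJavaScript(), doNotExecuteJavaScript(), mayExecuteJavascript(), setBrowsershot() and getBrowsershot() to it, removing two fields and five methods from the class.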
While breaking up the class, it is a good idea to analyze how other classes use Crawler and, based on these observations, to apply Extract Interface as well.
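For example, if most consumers only configure the crawler before it runs, a narrow interface can capture that usage and let those consumers depend on it instead of the concrete class. A sketch under that assumption follows; the interface name and the exact selection of methods are hypothetical, while the signatures are taken from the class listing below:

```php
<?php

use Spatie\Crawler\Crawler;
use Spatie\Crawler\CrawlProfile;
use Spatie\Crawler\CrawlQueue\CrawlQueue;

// Hypothetical interface describing only the configuration surface of Crawler.
interface ConfiguresCrawling
{
    public function setConcurrency(int $concurrency): Crawler;

    public function setMaximumCrawlCount(int $maximumCrawlCount): Crawler;

    public function setMaximumDepth(int $maximumDepth): Crawler;

    public function setDelayBetweenRequests(int $delay): Crawler;

    public function setCrawlProfile(CrawlProfile $crawlProfile): Crawler;

    public function setCrawlQueue(CrawlQueue $crawlQueue): Crawler;
}
```

Crawler would implement this interface, and configuring callers would type-hint ConfiguresCrawling, keeping them decoupled from the crawling internals.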
```php
<?php

// ... (namespace and use statements are omitted in this listing) ...

class Crawler
{
    /** @var \GuzzleHttp\Client */
    protected $client;

    /** @var \Psr\Http\Message\UriInterface */
    protected $baseUrl;

    /** @var \Spatie\Crawler\CrawlObserverCollection */
    protected $crawlObservers;

    /** @var \Spatie\Crawler\CrawlProfile */
    protected $crawlProfile;

    /** @var int */
    protected $concurrency;

    /** @var \Spatie\Crawler\CrawlQueue\CrawlQueue */
    protected $crawlQueue;

    /** @var int */
    protected $crawledUrlCount = 0;

    /** @var int|null */
    protected $maximumCrawlCount = null;

    /** @var int */
    protected $maximumResponseSize = 1024 * 1024 * 2;

    /** @var int|null */
    protected $maximumDepth = null;

    /** @var bool */
    protected $respectRobots = true;

    /** @var \Tree\Node\Node */
    protected $depthTree;

    /** @var bool */
    protected $executeJavaScript = false;

    /** @var Browsershot */
    protected $browsershot = null;

    /** @var \Spatie\Robots\RobotsTxt */
    protected $robotsTxt = null;

    /** @var string */
    protected $crawlRequestFulfilledClass;

    /** @var string */
    protected $crawlRequestFailedClass;

    /** @var int */
    protected $delayBetweenRequests = 0;

    /** @var */
    protected static $defaultClientOptions = [
        RequestOptions::COOKIES => true,
        RequestOptions::CONNECT_TIMEOUT => 10,
        RequestOptions::TIMEOUT => 10,
        RequestOptions::ALLOW_REDIRECTS => false,
    ];

    // Method bodies are collapsed below; only the signatures are shown
    // (setUserAgent() is expanded in the original report).

    public static function create(array $clientOptions = []): Crawler

    public function __construct(Client $client, int $concurrency = 10)

    public function setConcurrency(int $concurrency): Crawler

    public function setMaximumResponseSize(int $maximumResponseSizeInBytes): Crawler

    public function getMaximumResponseSize(): ?int

    public function setMaximumCrawlCount(int $maximumCrawlCount): Crawler

    public function getMaximumCrawlCount(): ?int

    public function getCrawlerUrlCount(): int

    public function setMaximumDepth(int $maximumDepth): Crawler

    public function getMaximumDepth(): ?int

    /**
     * @param int $delay The delay in milliseconds.
     *
     * @return Crawler
     */
    public function setDelayBetweenRequests(int $delay): Crawler

    /**
     * @return int The delay in milliseconds.
     */
    public function getDelayBetweenRequests(): int

    public function ignoreRobots(): Crawler

    public function respectRobots(): Crawler

    public function mustRespectRobots(): bool

    public function getRobotsTxt(): RobotsTxt

    public function setCrawlQueue(CrawlQueue $crawlQueue): Crawler

    public function getCrawlQueue(): CrawlQueue

    public function executeJavaScript(): Crawler

    public function doNotExecuteJavaScript(): Crawler

    public function mayExecuteJavascript(): bool

    /**
     * @param \Spatie\Crawler\CrawlObserver|array[\Spatie\Crawler\CrawlObserver] $crawlObservers
     *
     * @return $this
     */
    public function setCrawlObserver($crawlObservers): Crawler

    public function setCrawlObservers(array $crawlObservers): Crawler

    public function addCrawlObserver(CrawlObserver $crawlObserver): Crawler

    public function getCrawlObservers(): CrawlObserverCollection

    public function setCrawlProfile(CrawlProfile $crawlProfile): Crawler

    public function getCrawlProfile(): CrawlProfile

    public function setCrawlFulfilledHandlerClass(string $crawlRequestFulfilledClass): Crawler

    public function setCrawlFailedHandlerClass(string $crawlRequestFailedClass): Crawler

    public function setBrowsershot(Browsershot $browsershot)

    public function setUserAgent(string $userAgent): Crawler
    {
        $clientOptions = $this->client->getConfig();
        $clientOptions['headers']['User-Agent'] = strtolower($userAgent);

        $this->client = new Client($clientOptions);

        return $this;
    }

    public function getUserAgent(): string

    public function getBrowsershot(): Browsershot

    public function getBaseUrl(): UriInterface

    /**
     * @param \Psr\Http\Message\UriInterface|string $baseUrl
     */
    public function startCrawling($baseUrl)

    public function addToDepthTree(UriInterface $url, UriInterface $parentUrl, Node $node = null): ?Node

    protected function startCrawlingQueue()

    /**
     * @deprecated This function will be removed in the next major version
     */
    public function endsWith($haystack, $needle)

    protected function createRobotsTxt(UriInterface $uri): RobotsTxt

    protected function getCrawlRequests(): Generator

    public function addToCrawlQueue(CrawlUrl $crawlUrl): Crawler

    public function maximumCrawlCountReached(): bool
}
```
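As mentioned above, the maximum* fields in this listing share a prefix and vary together with $crawledUrlCount, which makes them a natural Extract Class candidate as well. A minimal sketch under the assumption of a hypothetical CrawlLimits class follows; the name and API are illustrative, and maximumCrawlCountReached() shows one plausible implementation, since the original method body is not part of this listing:

```php
<?php

// Hypothetical component grouping the crawl limits currently spread across Crawler.
class CrawlLimits
{
    /** @var int|null */
    protected $maximumCrawlCount = null;

    /** @var int|null */
    protected $maximumDepth = null;

    /** @var int */
    protected $maximumResponseSize = 1024 * 1024 * 2;

    /** @var int */
    protected $crawledUrlCount = 0;

    public function setMaximumCrawlCount(int $maximumCrawlCount): self
    {
        $this->maximumCrawlCount = $maximumCrawlCount;

        return $this;
    }

    public function setMaximumDepth(int $maximumDepth): self
    {
        $this->maximumDepth = $maximumDepth;

        return $this;
    }

    public function setMaximumResponseSize(int $maximumResponseSizeInBytes): self
    {
        $this->maximumResponseSize = $maximumResponseSizeInBytes;

        return $this;
    }

    public function recordCrawledUrl(): void
    {
        $this->crawledUrlCount++;
    }

    public function getCrawledUrlCount(): int
    {
        return $this->crawledUrlCount;
    }

    public function maximumCrawlCountReached(): bool
    {
        if (is_null($this->maximumCrawlCount)) {
            return false;
        }

        return $this->crawledUrlCount >= $this->maximumCrawlCount;
    }
}
```

Crawler would keep one CrawlLimits instance and forward setMaximumCrawlCount(), setMaximumDepth(), setMaximumResponseSize(), getCrawlerUrlCount() and maximumCrawlCountReached() to it.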
This check marks PHPDoc comments that could not be parsed by our parser. To see which comment annotations we can parse, please refer to our documentation on supported doc-types.
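In the listing above, the bare /** @var */ annotation on $defaultClientOptions carries no type expression, and the array[\Spatie\Crawler\CrawlObserver] syntax in setCrawlObserver()'s docblock is also non-standard; comments like these are what this check typically flags. A sketch of a fix for the first case, assuming the property is meant to be documented simply as an array:

```php
// Before: no type expression, so the annotation cannot be parsed.
/** @var */
protected static $defaultClientOptions = [ /* ... */ ];

// After: a typed annotation that documentation tools can parse.
/** @var array */
protected static $defaultClientOptions = [ /* ... */ ];
```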