Complex classes like Crawler often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to finding such a component is to look for fields/methods that share the same prefixes or suffixes. You can also have a look at the cohesion graph to spot any unconnected or weakly-connected components.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
While breaking up the class, it is a good idea to analyze how other classes use Crawler, and based on these observations, apply Extract Interface, too.
1 | <?php |
||
23 | class Crawler |
||
24 | { |
||
25 | /** @var \GuzzleHttp\Client */ |
||
26 | protected $client; |
||
27 | |||
28 | /** @var \Psr\Http\Message\UriInterface */ |
||
29 | protected $baseUrl; |
||
30 | |||
31 | /** @var \Spatie\Crawler\CrawlObserverCollection */ |
||
32 | protected $crawlObservers; |
||
33 | |||
34 | /** @var \Spatie\Crawler\CrawlProfile */ |
||
35 | protected $crawlProfile; |
||
36 | |||
37 | /** @var int */ |
||
38 | protected $concurrency; |
||
39 | |||
40 | /** @var \Spatie\Crawler\CrawlQueue\CrawlQueue */ |
||
41 | protected $crawlQueue; |
||
42 | |||
43 | /** @var int */ |
||
44 | protected $crawledUrlCount = 0; |
||
45 | |||
46 | /** @var int|null */ |
||
47 | protected $maximumCrawlCount = null; |
||
48 | |||
49 | /** @var int */ |
||
50 | protected $maximumResponseSize = 1024 * 1024 * 2; |
||
51 | |||
52 | /** @var int|null */ |
||
53 | protected $maximumDepth = null; |
||
54 | |||
55 | /** @var bool */ |
||
56 | protected $respectRobots = true; |
||
57 | |||
58 | /** @var \Tree\Node\Node */ |
||
59 | protected $depthTree; |
||
60 | |||
61 | /** @var bool */ |
||
62 | protected $executeJavaScript = false; |
||
63 | |||
64 | /** @var Browsershot */ |
||
65 | protected $browsershot = null; |
||
66 | |||
67 | /** @var \Spatie\Robots\RobotsTxt */ |
||
68 | protected $robotsTxt = null; |
||
69 | |||
70 | /** @var string */ |
||
71 | protected $crawlRequestFulfilledClass; |
||
72 | |||
73 | /** @var string */ |
||
74 | protected $crawlRequestFailedClass; |
||
75 | |||
76 | /** @var array<string, mixed> */ |
||
77 | protected static $defaultClientOptions = [ |
||
78 | RequestOptions::COOKIES => true, |
||
79 | RequestOptions::CONNECT_TIMEOUT => 10, |
||
80 | RequestOptions::TIMEOUT => 10, |
||
81 | RequestOptions::ALLOW_REDIRECTS => false, |
||
82 | ]; |
||
83 | |||
84 | public static function create(array $clientOptions = []): Crawler |
||
94 | |||
95 | public function __construct(Client $client, int $concurrency = 10) |
||
111 | |||
112 | public function setConcurrency(int $concurrency): Crawler |
||
118 | |||
119 | public function setMaximumResponseSize(int $maximumResponseSizeInBytes): Crawler |
||
125 | |||
126 | public function getMaximumResponseSize(): ?int |
||
130 | |||
131 | public function setMaximumCrawlCount(int $maximumCrawlCount): Crawler |
||
137 | |||
138 | public function getMaximumCrawlCount(): ?int |
||
142 | |||
143 | public function getCrawlerUrlCount(): int |
||
147 | |||
148 | public function setMaximumDepth(int $maximumDepth): Crawler |
||
154 | |||
155 | public function getMaximumDepth(): ?int |
||
159 | |||
160 | public function ignoreRobots(): Crawler |
||
166 | |||
167 | public function respectRobots(): Crawler |
||
173 | |||
174 | public function mustRespectRobots(): bool |
||
178 | |||
179 | public function getRobotsTxt(): RobotsTxt |
||
183 | |||
184 | public function setCrawlQueue(CrawlQueue $crawlQueue): Crawler |
||
190 | |||
191 | public function getCrawlQueue(): CrawlQueue |
||
195 | |||
196 | public function executeJavaScript(): Crawler |
||
202 | |||
203 | public function doNotExecuteJavaScript(): Crawler |
||
209 | |||
210 | public function mayExecuteJavascript(): bool |
||
214 | |||
215 | /** |
||
216 | * @param \Spatie\Crawler\CrawlObserver|\Spatie\Crawler\CrawlObserver[] $crawlObservers |
||
|
|||
217 | * |
||
218 | * @return $this |
||
219 | */ |
||
220 | public function setCrawlObserver($crawlObservers): Crawler |
||
228 | |||
229 | public function setCrawlObservers(array $crawlObservers): Crawler |
||
235 | |||
236 | public function addCrawlObserver(CrawlObserver $crawlObserver): Crawler |
||
242 | |||
243 | public function getCrawlObservers(): CrawlObserverCollection |
||
247 | |||
248 | public function setCrawlProfile(CrawlProfile $crawlProfile): Crawler |
||
254 | |||
255 | public function getCrawlProfile(): CrawlProfile |
||
259 | |||
260 | public function setCrawlFulfilledHandlerClass(string $crawlRequestFulfilledClass): Crawler |
||
272 | |||
273 | public function setCrawlFailedHandlerClass(string $crawlRequestFailedClass): Crawler |
||
285 | |||
286 | |||
287 | public function setBrowsershot(Browsershot $browsershot) |
||
293 | |||
294 | public function getBrowsershot(): Browsershot |
||
302 | |||
303 | |||
304 | public function getBaseUrl(): UriInterface |
||
308 | |||
309 | /** |
||
310 | * @param \Psr\Http\Message\UriInterface|string $baseUrl |
||
311 | */ |
||
312 | public function startCrawling($baseUrl) |
||
346 | |||
347 | public function addToDepthTree(UriInterface $url, UriInterface $parentUrl, Node $node = null): ?Node |
||
371 | |||
372 | protected function startCrawlingQueue() |
||
387 | |||
388 | /** |
||
389 | * @deprecated This function will be removed in the next major version |
||
390 | */ |
||
391 | public function endsWith($haystack, $needle) |
||
396 | |||
397 | protected function createRobotsTxt(UriInterface $uri): RobotsTxt |
||
401 | |||
402 | protected function getCrawlRequests(): Generator |
||
423 | |||
424 | public function addToCrawlQueue(CrawlUrl $crawlUrl): Crawler |
||
440 | |||
441 | public function maximumCrawlCountReached(): bool |
||
451 | } |
||
452 |
This check marks PHPDoc comments that could not be parsed by our parser. To see which comment annotations we can parse, please refer to our documentation on supported doc-types.