Completed
Push — master ( ac4dad...d022cf )
by Dev
14:07 queued 12:58
created

Crawler::harvest()   C

Complexity

Conditions 13
Paths 32

Size

Total Lines 79
Code Lines 52

Duplication

Lines 0
Ratio 0 %

Code Coverage

Tests 45
CRAP Score 13.4121

Importance

Changes 0
Metric Value
eloc 52
dl 0
loc 79
ccs 45
cts 52
cp 0.8654
rs 6.6166
c 0
b 0
f 0
cc 13
nc 32
nop 1
crap 13.4121

How to fix   Long Method    Complexity   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include:

1
<?php
2
3
namespace PiedWeb\SeoPocketCrawler;
4
5
use PiedWeb\UrlHarvester\Harvest;
6
use PiedWeb\UrlHarvester\Indexable;
7
use Spatie\Robots\RobotsTxt;
8
9
class Crawler
{
    /** @var string user agent sent with every request during the crawl */
    protected $userAgent;

    /** @var string crawl id (date + host), also used as the data folder name */
    protected $id;

    /** @var RobotsTxt rules describing pages to ignore during the crawl */
    protected $ignore;

    /** @var int maximum click depth to crawl */
    protected $limit;

    /** @var string scheme + host part (https://domain.tdl) taken from the start URL */
    protected $base;

    /** @var bool */
    protected $fromCache;

    /** @var Recorder persists crawl results (assigned in __construct) */
    protected $recorder;

    /** robots.txt cached from the first successful harvest — see cacheRobotsTxt() */
    protected $robotsTxt;

    /** last HTTP request, reused by Harvest::fromUrl — see cacheRequest() */
    protected $request;

    /** @var int microseconds to sleep between two requests */
    protected $wait = 0;

    /** @var int click depth currently being crawled */
    protected $currentClick = 0;

    /** @var int number of URLs crawled so far */
    protected $counter = 0;

    /** @var array URLs discovered so far, keyed by path relative to $base; null = not yet crawled */
    protected $urls = [];
51
52 6
    /**
     * Prepare a new crawl: normalize the start URL, build the crawl id,
     * set up the recorder and persist the configuration to config.json.
     *
     * Bug fix: the host must be read from the ORIGINAL start URL. The previous
     * code reassigned $startUrl to the normalized relative path first, so
     * parse_url(..., PHP_URL_HOST) always returned null and the crawl id
     * never contained the host.
     *
     * @param string $startUrl           first URL to crawl (must be a valid absolute URL)
     * @param string $ignore             robots.txt-style rules of pages to skip
     * @param int    $limit              maximum click depth
     * @param string $userAgent          user agent used for every request
     * @param int    $cacheMethod        one of the Recorder::CACHE_* constants
     * @param int    $waitInMicroSeconds pause between two requests
     */
    public function __construct(
        string $startUrl,
        string $ignore,
        int $limit,
        string $userAgent,
        int $cacheMethod = Recorder::CACHE_ID,
        int $waitInMicroSeconds = 100000
    ) {
        $host = parse_url($startUrl, PHP_URL_HOST);
        $startUrl = $this->setBaseAndReturnNormalizedStartUrl($startUrl);

        $this->urls[$startUrl] = null;
        $this->id = date('ymdHi').'-'.$host;
        $this->ignore = new RobotsTxt($ignore);
        $this->userAgent = $userAgent;
        $this->limit = $limit;
        $this->wait = $waitInMicroSeconds;

        $this->recorder = new Recorder($this->getDataFolder(), $cacheMethod);

        // Persist the crawl configuration so it can be replayed/inspected later.
        file_put_contents($this->getDataFolder().'/config.json', json_encode([
            'startUrl' => $startUrl,
            'base' => $this->base,
            'ignore' => $ignore,
            'limit' => $limit,
            'userAgent' => $userAgent,
            'cacheMethod' => $cacheMethod,
            'wait' => $waitInMicroSeconds,
        ]));
    }
80
81 3
    /**
     * The crawl id (date + host), also used as the data folder name.
     *
     * @return string
     */
    public function getId()
    {
        return $this->id;
    }
85
86 6
    /**
     * Validate the start URL, store its scheme+host part in $this->base and
     * return the remaining path, always starting with '/'.
     *
     * Bug fix: the original read $url[0] after stripping the base even when
     * the start URL had no path (e.g. `https://example.com`), triggering an
     * "uninitialized string offset" warning (and substr() returning false on
     * PHP < 8). An empty remainder now normalizes cleanly to '/'.
     *
     * @param string $url absolute start URL
     *
     * @return string the path component, '/' when the URL has no path
     *
     * @throws \Exception when $url is not a valid URL
     */
    protected function setBaseAndReturnNormalizedStartUrl(string $url): string
    {
        if (!filter_var($url, FILTER_VALIDATE_URL)) {
            throw new \Exception('start is not a valid URL `'.$url.'`');
        }

        $this->base = preg_match('@^(http://|https://)?[^/\?#]+@', $url, $match) ? $match[0] : $url;
        $path = (string) substr($url, strlen($this->base));

        return '' === $path || '/' !== $path[0] ? '/'.$path : $path;
    }
97
98 6
    /**
     * Path of the folder where this crawl's data is stored.
     *
     * @return string
     */
    public function getDataFolder()
    {
        return sprintf('%s/../data/%s', __DIR__, $this->id);
    }
102
103 6
    /**
     * Crawl every known URL at the current click depth, then recurse one
     * level deeper until the depth limit is reached or no URL was updated.
     *
     * Results are auto-saved every 500 crawled URLs and after each level.
     *
     * @param bool $debug when true, progress is printed to stdout
     *
     * @return null|void
     */
    public function crawl(bool $debug = false)
    {
        $nothingUpdated = true;

        if ($debug) {
            echo PHP_EOL.PHP_EOL.'// -----'.PHP_EOL.'// '.$this->counter.' crawled / '
                        .count($this->urls).' found '.PHP_EOL.'// -----'.PHP_EOL;
        }

        foreach ($this->urls as $urlToParse => $url) {
            // A non-null entry with a boolean can_be_crawled was already crawled.
            if (null !== $url && (false === $url->can_be_crawled || true === $url->can_be_crawled)) {
                continue;
            } elseif ($this->currentClick > $this->limit) {
                continue;
            }

            if ($debug) {
                echo $this->counter.'/'.count($this->urls).'    '.$urlToParse.PHP_EOL;
            }

            $nothingUpdated = false;
            ++$this->counter;

            $harvest = $this->harvest($urlToParse);
            $this->urls[$urlToParse]->setDiscovered(count($this->urls));

            $this->cacheRobotsTxt($harvest);
            $this->cacheRequest($harvest);

            usleep($this->wait);

            // Auto-save every 500 crawled URLs. Modulo replaces the original
            // float-division + round() comparison — same result, clearer intent.
            if (0 === $this->counter % 500) {
                echo $debug ? '    --- auto-save'.PHP_EOL : '';
                $this->recorder->record($this->urls);
            }
        }

        ++$this->currentClick;

        // Record after each level:
        $this->recorder->record($this->urls);

        $finished = $nothingUpdated || $this->currentClick >= $this->limit;

        return $finished ? null : $this->crawl($debug);
    }
150
151 6
    /**
     * Memoise the robots.txt from the first successful harvest so later
     * harvests can reuse it instead of fetching it again.
     *
     * @param Harvest|mixed $harvest
     *
     * @return self
     */
    protected function cacheRobotsTxt($harvest)
    {
        $firstSuccessfulHarvest = $harvest instanceof Harvest && null === $this->robotsTxt;
        if ($firstSuccessfulHarvest) {
            $this->robotsTxt = $harvest->getRobotsTxt();
        }

        return $this;
    }
159
160 6
    /**
     * Keep the last HTTP request so the next harvest can reuse it
     * (passed back into Harvest::fromUrl).
     *
     * @param Harvest|mixed $harvest
     *
     * @return self
     */
    protected function cacheRequest($harvest)
    {
        if (!$harvest instanceof Harvest) {
            return $this;
        }

        $this->request = $harvest->getResponse()->getRequest();

        return $this;
    }
168
169 4
    /**
     * Inject the previously cached robots.txt into a harvest, if any.
     *
     * @return self
     */
    protected function loadRobotsTxt(Harvest $harvest)
    {
        if (null === $this->robotsTxt) {
            return $this;
        }

        $harvest->setRobotsTxt($this->robotsTxt);

        return $this;
    }
177
178 6
    /**
     * Crawl a single URL: check robots.txt, fetch the page, extract metrics
     * (indexability, links, breadcrumb, title, h1, load time…) and queue
     * newly discovered internal links for the next click depth.
     *
     * Also removes scraped review-tool artifacts that were embedded in the
     * original source, and uses strict (===) string comparisons to avoid
     * PHP's loose numeric-string coercion ('1' == '01' is true with ==).
     *
     * @param string $urlToParse path relative to $this->base
     *
     * @return Harvest|void the harvest on success; nothing when the URL is
     *                      disallowed by robots.txt or a network error occurred
     */
    protected function harvest(string $urlToParse)
    {
        $url = $this->urls[$urlToParse] = $this->urls[$urlToParse] ?? new Url($this->base.$urlToParse, $this->currentClick);

        $url->can_be_crawled = $this->ignore->allows($this->base.$urlToParse, $this->userAgent);

        if (false === $url->can_be_crawled) {
            return;
        }

        $harvest = Harvest::fromUrl(
            $this->base.$urlToParse,
            $this->userAgent,
            'en,en-US;q=0.5',
            $this->request
        );

        if (!$harvest instanceof Harvest) {
            $url->indexable = Indexable::NOT_INDEXABLE_NETWORK_ERROR;

            return;
        }

        $this->loadRobotsTxt($harvest);

        $url->indexable = $harvest->isIndexable();

        if (Indexable::NOT_INDEXABLE_3XX === $url->indexable) {
            $redir = $harvest->getRedirection();
            if (false !== $redir) {
                // NOTE(review): static analysis flags this condition as always
                // false — confirm Harvest::getType() can actually return
                // LINK_INTERNAL for a redirection target.
                $links = Harvest::LINK_INTERNAL === $harvest->getType($redir) ? [$redir] : [];
            }
        } else {
            $this->recorder->cache($harvest, $url);

            $mimeType = $harvest->getResponse()->getMimeType();
            // 1 is a compact marker for the common text/html case.
            $url->mime_type = 'text/html' === $mimeType ? 1 : $mimeType;

            $this->recorder->recordOutboundLink($url, $harvest->getLinks());

            $url->links = count($harvest->getLinks());
            $url->links_duplicate = $harvest->getNbrDuplicateLinks();
            $url->links_internal = count($harvest->getLinks(Harvest::LINK_INTERNAL));
            $url->links_self = count($harvest->getLinks(Harvest::LINK_SELF));
            $url->links_sub = count($harvest->getLinks(Harvest::LINK_SUB));
            $url->links_external = count($harvest->getLinks(Harvest::LINK_EXTERNAL));
            $links = $harvest->getLinks(Harvest::LINK_INTERNAL);

            $url->ratio_text_code = $harvest->getRatioTxtCode();
            $url->load_time = $harvest->getResponse()->getInfo('total_time');
            $url->size = $harvest->getResponse()->getInfo('size_download');

            $breadcrumb = $harvest->getBreadCrumb();
            if (is_array($breadcrumb)) {
                $url->breadcrumb_level = count($breadcrumb);
                $url->breadcrumb_first = isset($breadcrumb[1]) ? $breadcrumb[1]->getCleanName() : '';
                $url->breadcrumb_text = $harvest->getBreadCrumb('//');
            }

            $url->title = $harvest->getUniqueTag('head title') ?? '';
            $url->kws = ','.implode(',', array_keys($harvest->getKws())).',';
            $url->h1 = $harvest->getUniqueTag('h1') ?? '';
            // '=' marks an h1 identical to the title; strict comparison avoids
            // numeric-string coercion.
            $url->h1 = $url->title === $url->h1 ? '=' : $url->h1;
        }

        // Queue every internal link exactly once per page and record it as an
        // inbound link of its target.
        $everAdd = [];
        if (isset($links)) {
            foreach ($links as $link) {
                $linkUrl = $link->getPageUrl();
                $this->urls[$linkUrl] = $this->urls[$linkUrl] ?? new Url($linkUrl, ($this->currentClick + 1));
                if (!isset($everAdd[$linkUrl])) {
                    $everAdd[$linkUrl] = 1;
                    $this->recorder->recordInboundLink($url, $this->urls[$linkUrl]);
                    ++$this->urls[$linkUrl]->inboundlinks;
                }
            }
        }

        return $harvest;
    }
258
}
259