Passed
Push — master ( b2f1dc...ac4dad )
by Dev
13:52
created

Crawler::setBaseAndReturnNormalizedStartUrl()   A

Complexity

Conditions 4
Paths 5

Size

Total Lines 10
Code Lines 5

Duplication

Lines 0
Ratio 0 %

Code Coverage

Tests 4
CRAP Score 4

Importance

Changes 0
Metric Value
eloc 5
dl 0
loc 10
ccs 4
cts 4
cp 1
rs 10
c 0
b 0
f 0
cc 4
nc 5
nop 1
crap 4
1
<?php

namespace PiedWeb\SeoPocketCrawler;

use PiedWeb\UrlHarvester\Harvest;
use PiedWeb\UrlHarvester\Indexable;
use Spatie\Robots\RobotsTxt;
class Crawler
{
    /**
     * @var string user agent sent with every request during the crawl
     */
    protected $userAgent;

    /**
     * @var string crawl id (date prefix + host), also used as the data folder name
     */
    protected $id;

    /**
     * @var RobotsTxt virtual robots.txt listing pages to ignore during the crawl
     */
    protected $ignore;

    /**
     * @var int depth max (clicks from the start url) where to crawl
     */
    protected $limit;

    /**
     * @var string contains scheme + host (eg `https://domain.tld`) from start url
     */
    protected $base;

    /**
     * @var bool
     */
    protected $fromCache;

    /** @var Recorder persists crawl results and the page cache */
    protected $recorder;

    /** @var mixed robots.txt content cached from the first successful harvest */
    protected $robotsTxt;

    /** @var mixed previous request, reused by the harvester (connection reuse) */
    protected $request;

    /** @var int microseconds to sleep between two requests */
    protected $wait = 0;

    /** @var int current depth (click level) being crawled */
    protected $currentClick = 0;

    /** @var int number of urls crawled so far */
    protected $counter = 0;

    /** @var array<string, Url|null> relative url => Url once seen (null = discovered, not crawled) */
    protected $urls = [];

    /**
     * @param string $startUrl           absolute url where the crawl starts
     * @param string $ignore             robots.txt-style rules of urls to skip
     * @param int    $limit              max depth (clicks from the start url)
     * @param string $userAgent          user agent used for every request
     * @param int    $cacheMethod        one of the Recorder::CACHE_* constants
     * @param int    $waitInMicroSeconds pause between two requests
     */
    public function __construct(
        string $startUrl,
        string $ignore,
        int $limit,
        string $userAgent,
        int $cacheMethod = Recorder::CACHE_ID,
        int $waitInMicroSeconds = 100000
    ) {
        $startUrl = $this->setBaseAndReturnNormalizedStartUrl($startUrl);
        $this->urls[$startUrl] = null;
        // Bugfix: $startUrl is now a relative path (`/...`), so the host must be
        // parsed from the base url, not from the normalized start url (which
        // previously always yielded a null host).
        $this->id = date('ymdHi').'-'.parse_url($this->base, PHP_URL_HOST);
        $this->ignore = new RobotsTxt($ignore);
        $this->userAgent = $userAgent;
        $this->limit = $limit;
        $this->wait = $waitInMicroSeconds;

        $this->recorder = new Recorder($this->getDataFolder(), $cacheMethod);

        // Persist the crawl configuration so the crawl can be inspected/resumed.
        file_put_contents($this->getDataFolder().'/config.json', json_encode([
            'startUrl' => $startUrl,
            'base' => $this->base,
            'ignore' => $ignore,
            'limit' => $limit,
            'userAgent' => $userAgent,
            'cacheMethod' => $cacheMethod,
            'wait' => $waitInMicroSeconds,
        ]));
    }

    /**
     * Return the crawl id (date prefix + host).
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Validate the start url, store its scheme + host in $this->base and return
     * the remaining part as an absolute path (always starting with `/`).
     *
     * @throws \Exception when $url is not a valid url
     */
    protected function setBaseAndReturnNormalizedStartUrl(string $url): string
    {
        if (!filter_var($url, FILTER_VALIDATE_URL)) {
            throw new \Exception('start is not a valid URL `'.$url.'`');
        }

        $this->base = preg_match('@^(http://|https://)?[^/\?#]+@', $url, $match) ? $match[0] : $url;
        $path = substr($url, strlen($this->base));

        // Bugfix: when the start url has no path (`https://domain.tld`), substr
        // returns '' (PHP 8) or false (PHP 7) and reading `[0]` on it is an
        // invalid string offset on PHP 8 — normalize to the root path instead.
        if ('' === $path || false === $path) {
            return '/';
        }

        return ('/' !== $path[0] ? '/' : '').$path;
    }

    /**
     * Absolute path of the folder where this crawl's data is stored.
     */
    public function getDataFolder()
    {
        return __DIR__.'/../data/'.$this->id;
    }

    /**
     * Crawl every url known at the current click level, then recurse to the
     * next level until nothing new was crawled or the depth limit is reached.
     *
     * @param bool $debug echo progress information when true
     *
     * @return null
     */
    public function crawl(bool $debug = false)
    {
        $nothingUpdated = true;

        if ($debug) {
            echo PHP_EOL.PHP_EOL.'// -----'.PHP_EOL.'// '.$this->counter.' crawled / '
                        .count($this->urls).' found '.PHP_EOL.'// -----'.PHP_EOL;
        }

        foreach ($this->urls as $urlToParse => $url) {
            // can_be_crawled is a bool once the url has been processed
            // (allowed or not); null/unset means "not crawled yet".
            if (null !== $url && \is_bool($url->can_be_crawled)) {
                continue;
            }
            if ($this->currentClick > $this->limit) {
                continue;
            }

            if ($debug) {
                echo $this->counter.'/'.count($this->urls).'    '.$urlToParse.PHP_EOL;
            }

            $nothingUpdated = false;
            ++$this->counter;

            $harvest = $this->harvest($urlToParse);
            $this->urls[$urlToParse]->setDiscovered(count($this->urls));

            $this->cacheRobotsTxt($harvest);
            $this->cacheRequest($harvest);

            usleep($this->wait);

            // Auto-save every 500 crawled urls (integer modulo instead of the
            // previous float `/500 == round(/500)` comparison).
            if (0 === $this->counter % 500) {
                echo $debug ? '    --- auto-save'.PHP_EOL : '';
                $this->recorder->record($this->urls);
            }
        }

        ++$this->currentClick;

        // Record after each level:
        $this->recorder->record($this->urls);

        $finished = $nothingUpdated || $this->currentClick >= $this->limit;

        return $finished ? null : $this->crawl($debug);
    }

    /**
     * Keep the robots.txt from the first successful harvest so later harvests
     * don't have to fetch it again (see loadRobotsTxt()).
     *
     * @param mixed $harvest Harvest instance or null/error value
     *
     * @return self
     */
    protected function cacheRobotsTxt($harvest)
    {
        if (null === $this->robotsTxt && $harvest instanceof Harvest) {
            $this->robotsTxt = $harvest->getRobotsTxt();
        }

        return $this;
    }

    /**
     * Keep the last request so the next harvest can reuse it.
     *
     * @param mixed $harvest Harvest instance or null/error value
     *
     * @return self
     */
    protected function cacheRequest($harvest)
    {
        if ($harvest instanceof Harvest) {
            $this->request = $harvest->getResponse()->getRequest();
        }

        return $this;
    }

    /**
     * Inject the cached robots.txt (if any) into a fresh harvest.
     *
     * @return self
     */
    protected function loadRobotsTxt(Harvest $harvest)
    {
        if (null !== $this->robotsTxt) {
            $harvest->setRobotsTxt($this->robotsTxt);
        }

        return $this;
    }

    /**
     * Crawl a single url: check the ignore rules, fetch the page, extract the
     * SEO metrics and register every newly discovered internal link.
     *
     * @return Harvest|null null when the url is disallowed or a network error occurred
     */
    protected function harvest(string $urlToParse)
    {
        $url = $this->urls[$urlToParse] = $this->urls[$urlToParse] ?? new Url($this->base.$urlToParse, $this->currentClick);

        $url->can_be_crawled = $this->ignore->allows($this->base.$urlToParse, $this->userAgent);

        if (false === $url->can_be_crawled) {
            return null;
        }

        $harvest = Harvest::fromUrl(
            $this->base.$urlToParse,
            $this->userAgent,
            'en,en-US;q=0.5',
            $this->request // reuse the previous request (connection reuse)
        );

        if (!$harvest instanceof Harvest) {
            $url->indexable = Indexable::NOT_INDEXABLE_NETWORK_ERROR;

            return null;
        }

        $this->loadRobotsTxt($harvest);

        $url->indexable = $harvest->isIndexable();

        if (Indexable::NOT_INDEXABLE_3XX === $url->indexable) {
            // Redirection: queue the target only when it stays on the same host.
            $redir = $harvest->getRedirection();
            if (false !== $redir) {
                $links = Harvest::LINK_INTERNAL === $harvest->getType($redir) ? [$redir] : [];
            }
        } else {
            $this->recorder->cache($harvest, $url);

            $mimeType = $harvest->getResponse()->getMimeType();
            $url->mime_type = 'text/html' == $mimeType ? 1 : $mimeType; // 1 is shorthand for text/html

            $this->recorder->recordOutboundLink($url, $harvest->getLinks());

            $url->links = count($harvest->getLinks());
            $url->links_duplicate = $harvest->getNbrDuplicateLinks();
            $url->links_internal = count($harvest->getLinks(Harvest::LINK_INTERNAL));
            $url->links_self = count($harvest->getLinks(Harvest::LINK_SELF));
            $url->links_sub = count($harvest->getLinks(Harvest::LINK_SUB));
            $url->links_external = count($harvest->getLinks(Harvest::LINK_EXTERNAL));
            $links = $harvest->getLinks(Harvest::LINK_INTERNAL);

            $url->ratio_text_code = $harvest->getRatioTxtCode();
            $url->load_time = $harvest->getResponse()->getInfo('total_time');
            $url->size = $harvest->getResponse()->getInfo('size_download');

            $breadcrumb = $harvest->getBreadCrumb();
            if (is_array($breadcrumb)) {
                $url->breadcrumb_level = count($breadcrumb);
                $url->breadcrumb_first = isset($breadcrumb[1]) ? $breadcrumb[1]->getCleanName() : '';
                $url->breadcrumb_text = $harvest->getBreadCrumb('//');
            }

            $url->title = $harvest->getUniqueTag('head title') ?? '';
            $url->kws = ','.implode(',', array_keys($harvest->getKws())).',';
            $url->h1 = $harvest->getUniqueTag('h1') ?? '';
            $url->h1 = $url->title == $url->h1 ? '=' : $url->h1; // '=' means "same as title"
        }

        // Register every internal link found on this page, counting each
        // distinct target only once as an inbound link.
        $everAdd = [];
        if (isset($links)) {
            foreach ($links as $link) {
                $linkUrl = $link->getPageUrl();
                $this->urls[$linkUrl] = $this->urls[$linkUrl] ?? new Url($linkUrl, ($this->currentClick + 1));
                if (!isset($everAdd[$linkUrl])) {
                    $everAdd[$linkUrl] = 1;
                    $this->recorder->recordInboundLink($url, $this->urls[$linkUrl]);
                    ++$this->urls[$linkUrl]->inboundlinks;
                }
            }
        }

        return $harvest;
    }
}