@@ -13,12 +13,12 @@
13 | 13 | {
14 | 14 |     public function validate(Response $response)
15 | 15 |     {
16 | -         $url = $response->getUri()->getScheme() . '://' . $response->getUri()->getHost();
16 | +         $url = $response->getUri()->getScheme().'://'.$response->getUri()->getHost();
17 | 17 | 
18 | 18 |         if (substr_count($url, '/') === 2) {
19 | -             $filename = $robotsUrl = $url . '/robots.txt';
19 | +             $filename = $robotsUrl = $url.'/robots.txt';
20 | 20 |         } elseif (substr_count($url, '/') === 3) {
21 | -             $filename = $robotsUrl = $url . 'robots.txt';
21 | +             $filename = $robotsUrl = $url.'robots.txt';
22 | 22 |         } else {
23 | 23 |             return;
24 | 24 |         }
@@ -30,7 +30,7 @@
30 | 30 |         $content = file_get_contents($filename);
31 | 31 |         $normalizedContent = strtolower(str_replace(' ', '', $content));
32 | 32 | 
33 | -         if (strpos($normalizedContent, 'disallow:/' . PHP_EOL) !== false) {
33 | +         if (strpos($normalizedContent, 'disallow:/'.PHP_EOL) !== false) {
34 | 34 |             throw new ValidationFailedException('The robots.txt contains disallow all (Disallow: /)');
35 | 35 |         }
36 | 36 | 