@@ -19,7 +19,7 @@
| 19 | 19 | */ |
| 20 | 20 | public function isFiltered(UriInterface $currentUri, UriInterface $startUri) |
| 21 | 21 | { |
| 22 | -         return strlen((string)$currentUri) > 255;
| 22 | +         return strlen((string) $currentUri) > 255;
| 23 | 23 | } |
| 24 | 24 | |
| 25 | 25 | public function isResponseFiltered(ResponseInterface $response, UriInterface $startUri) |
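
For context: the method touched in this hunk rejects a crawl URI once its string form exceeds 255 characters. Below is a minimal, self-contained sketch of that rule, assuming `guzzlehttp/psr7` for the `UriInterface` implementation; the class name `MaxUriLengthFilter` is a hypothetical stand-in, and only the `isFiltered()` signature and the 255-character limit come from the hunk above.

```php
<?php
// Hypothetical stand-in for the filter class edited above; only the method body
// mirrors the diff. Requires any PSR-7 URI implementation (here guzzlehttp/psr7).
require __DIR__.'/vendor/autoload.php'; // composer autoloader, path assumed

use GuzzleHttp\Psr7\Uri;
use Psr\Http\Message\UriInterface;

class MaxUriLengthFilter
{
    public function isFiltered(UriInterface $currentUri, UriInterface $startUri)
    {
        // Cast the URI to its full string form and reject it once it exceeds 255 characters.
        return strlen((string) $currentUri) > 255;
    }
}

$filter = new MaxUriLengthFilter();
$start  = new Uri('https://example.com/');

var_dump($filter->isFiltered(new Uri('https://example.com/short'), $start));                 // bool(false)
var_dump($filter->isFiltered(new Uri('https://example.com/'.str_repeat('a', 300)), $start)); // bool(true)
```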
@@ -64,8 +64,8 @@
| 64 | 64 | */ |
| 65 | 65 | public function hasAttribute($key) |
| 66 | 66 | { |
| 67 | -     foreach($this->attributes as $attribute) {
| 68 | -         if($attribute->getKey() == $key) {
| 67 | +     foreach ($this->attributes as $attribute) {
| 68 | +         if ($attribute->getKey() == $key) {
| 69 | 69 | return true; |
| 70 | 70 | } |
| 71 | 71 | } |
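
The `hasAttribute()` hunk is a pure whitespace fix, but the lookup it reformats is a linear scan over attribute objects compared by key. A standalone sketch follows, with hypothetical `Attribute` and `AttributeBag` classes; only the `foreach` loop and the loose `==` comparison come from the diff.

```php
<?php
// Hypothetical Attribute/AttributeBag pair illustrating the lookup above.
class Attribute
{
    private $key;
    private $value;

    public function __construct($key, $value)
    {
        $this->key = $key;
        $this->value = $value;
    }

    public function getKey()
    {
        return $this->key;
    }
}

class AttributeBag
{
    private $attributes = [];

    public function addAttribute(Attribute $attribute)
    {
        $this->attributes[] = $attribute;
    }

    public function hasAttribute($key)
    {
        // Linear scan; the first matching key short-circuits the search.
        // The loose == comparison mirrors the original code.
        foreach ($this->attributes as $attribute) {
            if ($attribute->getKey() == $key) {
                return true;
            }
        }

        return false;
    }
}

$bag = new AttributeBag();
$bag->addAttribute(new Attribute('content-type', 'text/html'));

var_dump($bag->hasAttribute('content-type')); // bool(true)
var_dump($bag->hasAttribute('charset'));      // bool(false)
```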
@@ -21,12 +21,12 @@
| 21 | 21 | public function validate(ResponseInterface $response) |
| 22 | 22 | { |
| 23 | 23 | if ($response instanceof UriAwareResponse) { |
| 24 | -             $url = $response->getUri()->getScheme() . '://' . $response->getUri()->getHost();
| 24 | +             $url = $response->getUri()->getScheme().'://'.$response->getUri()->getHost();
| 25 | 25 | |
| 26 | 26 | if (substr_count($url, '/') === 2) { |
| 27 | -                 $filename = $robotsUrl = $url . '/robots.txt';
| 27 | +                 $filename = $robotsUrl = $url.'/robots.txt';
| 28 | 28 | } elseif (substr_count($url, '/') === 3) { |
| 29 | -                 $filename = $robotsUrl = $url . 'robots.txt';
| 29 | +                 $filename = $robotsUrl = $url.'robots.txt';
| 30 | 30 | } else { |
| 31 | 31 | return; |
| 32 | 32 | } |
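
The concatenations reformatted in this hunk build the robots.txt location from the response's scheme and host alone; the slash count decides whether a separating slash still has to be added, because a bare `scheme://host` contains exactly the two slashes of `://`. A plain-PHP illustration of that branch (the URL values are made up for the example):

```php
<?php
// Illustration of the slash-count branch above, detached from the validator class.
$url = 'https'.'://'.'example.com'; // scheme.'://'.host, as built in the hunk

if (substr_count($url, '/') === 2) {
    // Bare scheme://host: the separating slash must be added.
    $robotsUrl = $url.'/robots.txt';
} elseif (substr_count($url, '/') === 3) {
    // The host part already ends in a slash, so robots.txt can be appended directly.
    $robotsUrl = $url.'robots.txt';
} else {
    $robotsUrl = null; // anything else is skipped, as the original validate() returns early
}

echo $robotsUrl; // https://example.com/robots.txt
```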
@@ -39,7 +39,7 @@
| 39 | 39 | |
| 40 | 40 | $normalizedContent = $this->normalizeContent($content); |
| 41 | 41 | |
| 42 | -         if (strpos($normalizedContent, 'user-agent:* disallow:/' . PHP_EOL) !== false) {
| 42 | +         if (strpos($normalizedContent, 'user-agent:* disallow:/'.PHP_EOL) !== false) {
| 43 | 43 | throw new ValidationFailedException('The robots.txt contains disallow all (Disallow: /)'); |
| 44 | 44 | } |
| 45 | 45 | |
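
The needle `'user-agent:* disallow:/'` in this hunk only matches after `normalizeContent()` has run, and that method is not part of the diff. The sketch below therefore supplies an assumed normalization (lowercasing, stripping the space after the colon, folding lines together) purely so the disallow-all check can be reproduced end to end:

```php
<?php
// ASSUMED normalization: not taken from the diff, only chosen so that a blanket
// "User-agent: * / Disallow: /" record matches the validator's needle.
function normalizeContent($content)
{
    $normalized = strtolower($content);                     // case-insensitive directives
    $normalized = str_replace(': ', ':', $normalized);      // "disallow: /" -> "disallow:/"
    $normalized = preg_replace('/\R+/', ' ', $normalized);  // fold the record onto one line

    return $normalized.PHP_EOL;                             // the needle expects a trailing EOL
}

$robotsTxt = "User-agent: *\nDisallow: /";
$normalizedContent = normalizeContent($robotsTxt);

if (strpos($normalizedContent, 'user-agent:* disallow:/'.PHP_EOL) !== false) {
    echo 'disallow all detected'; // mirrors the ValidationFailedException branch above
}
```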