Completed
Push — master (2c871d...7c338b)
by Nils
02:32
created src/Rules/Seo/RobotsDisallowAllRule.php — 1 patch
Spacing: +4 added lines, -4 removed lines
@@ -13,12 +13,12 @@
 {
     public function validate(Response $response)
     {
-        $url = $response->getUri()->getScheme() . '://' . $response->getUri()->getHost();
+        $url = $response->getUri()->getScheme().'://'.$response->getUri()->getHost();
 
         if (substr_count($url, '/') === 2) {
-            $filename = $robotsUrl = $url . '/robots.txt';
+            $filename = $robotsUrl = $url.'/robots.txt';
         } elseif (substr_count($url, '/') === 3) {
-            $filename = $robotsUrl = $url . 'robots.txt';
+            $filename = $robotsUrl = $url.'robots.txt';
         } else {
             return;
         }
@@ -30,7 +30,7 @@
             $content = file_get_contents($filename);
             $normalizedContent = strtolower(str_replace(' ', '', $content));
 
-            if (strpos($normalizedContent, 'disallow:/' . PHP_EOL) !== false) {
+            if (strpos($normalizedContent, 'disallow:/'.PHP_EOL) !== false) {
                 throw new ValidationFailedException('The robots.txt contains disallow all (Disallow: /)');
             }
 
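For reference, the comparison this rule performs can be reproduced in isolation. The following is a minimal sketch (the helper name containsDisallowAll and the sample inputs are illustrative only, not part of this patch) that applies the same normalization and strpos() check as the code in the diff above:

<?php

// Sketch mirroring the check in RobotsDisallowAllRule::validate().
// Helper name and sample inputs are hypothetical, not part of this commit.
function containsDisallowAll(string $content): bool
{
    // Same normalization as the rule: strip spaces, lowercase everything.
    $normalizedContent = strtolower(str_replace(' ', '', $content));

    // Flag content containing a bare "Disallow: /" directive followed by a newline.
    return strpos($normalizedContent, 'disallow:/'.PHP_EOL) !== false;
}

var_dump(containsDisallowAll("User-agent: *".PHP_EOL."Disallow: /".PHP_EOL));      // bool(true)
var_dump(containsDisallowAll("User-agent: *".PHP_EOL."Disallow: /admin".PHP_EOL)); // bool(false)

Note that the match requires the directive to be followed by PHP_EOL, so a trailing "Disallow: /" with no line break, or a robots.txt using a different line ending than the server's PHP_EOL, would not trigger the exception.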