GitHub Access Token became invalid

It seems like the GitHub access token used for retrieving details about this repository from GitHub became invalid. This might prevent certain types of inspections from being run (in particular, everything related to pull requests).
Please ask an admin of your repository to renew the access token on this website.
Completed
Pull Request — master (#36)
by
unknown
03:32
created

ScanCommand::configure()   A

Complexity

Conditions 1
Paths 1

Size

Total Lines 57
Code Lines 46

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
dl 0
loc 57
rs 9.6818
c 0
b 0
f 0
cc 1
eloc 46
nc 1
nop 0

How to fix   Long Method   

Long Method

Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.

Commonly applied refactorings include:

1
<?php
2
3
namespace Spatie\HttpStatusCheck;
4
5
use Spatie\Crawler\Crawler;
6
use GuzzleHttp\RequestOptions;
7
use Spatie\Crawler\CrawlAllUrls;
8
use Spatie\Crawler\CrawlInternalUrls;
9
use Symfony\Component\Console\Command\Command;
10
use Symfony\Component\Console\Input\InputOption;
11
use Symfony\Component\Console\Input\InputArgument;
12
use Symfony\Component\Console\Input\InputInterface;
13
use Symfony\Component\Console\Output\OutputInterface;
14
use Symfony\Component\Console\Question\ConfirmationQuestion;
15
16
class ScanCommand extends Command
{
    /**
     * Configure the `scan` command: name, description, the required `url`
     * argument, and the crawl/request options.
     *
     * Fixes relative to the previous revision:
     * - `verify` was declared VALUE_NONE with a default of `false`; Symfony
     *   throws a LogicException when a VALUE_NONE option declares any
     *   non-null default, so the default is removed (getOption() already
     *   returns false when the flag is absent).
     * - `verify` used shortcut 'v', which collides with Symfony's built-in
     *   `--verbose|-v` and errors when the definitions are merged; the
     *   shortcut is dropped.
     * - `options` was declared VALUE_IS_ARRAY without an accompanying value
     *   mode; Symfony throws "Impossible to have an option mode
     *   VALUE_IS_ARRAY if the option does not accept a value". It now uses
     *   VALUE_REQUIRED | VALUE_IS_ARRAY. Its multi-character shortcut 'opt'
     *   is dropped as well: the parser reads `-opt` as the short-option set
     *   `-o -p -t`, so it was never reachable.
     */
    protected function configure()
    {
        $this->setName('scan')
            ->setDescription('Check the http status code of all links on a website.')
            ->addArgument(
                'url',
                InputArgument::REQUIRED,
                'The url to check'
            )
            ->addOption(
                'concurrency',
                'c',
                InputOption::VALUE_REQUIRED,
                'The amount of concurrent connections to use',
                10
            )
            ->addOption(
                'output',
                'o',
                InputOption::VALUE_REQUIRED,
                'Log all non-2xx and non-3xx responses in this file'
            )
            ->addOption(
                'dont-crawl-external-links',
                'x',
                InputOption::VALUE_NONE,
                'Dont crawl external links'
            )
            ->addOption(
                'timeout',
                't',
                InputOption::VALUE_OPTIONAL,
                'The maximum number of seconds the request can take',
                10
            )
            ->addOption(
                'user-agent',
                'u',
                InputOption::VALUE_OPTIONAL,
                'The User Agent to pass for the request',
                ''
            )
            ->addOption(
                'verify',
                null,
                InputOption::VALUE_NONE,
                'Describes the SSL certificate verification behavior of a request'
            )
            ->addOption(
                'options',
                null,
                InputOption::VALUE_REQUIRED | InputOption::VALUE_IS_ARRAY,
                'Additional options to the request',
                []
            );
    }

    /**
     * Crawl the given base url and report the http status of every link.
     *
     * Fixes relative to the previous revision: the Guzzle client-options
     * array passed `options` twice and `user-agent` once under numeric
     * keys, which Guzzle silently ignores. The user agent is now sent as a
     * proper `User-Agent` header, the duplicate entry is gone, and CLI
     * string values for timeout/concurrency are cast to int.
     *
     * @param \Symfony\Component\Console\Input\InputInterface $input
     * @param \Symfony\Component\Console\Output\OutputInterface $output
     *
     * @return int
     */
    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $baseUrl = $input->getArgument('url');

        $crawlProfile = $input->getOption('dont-crawl-external-links')
            ? new CrawlInternalUrls($baseUrl)
            : new CrawlAllUrls();

        $output->writeln("Start scanning {$baseUrl}");
        $output->writeln('');

        $crawlLogger = new CrawlLogger($output);

        if ($input->getOption('output')) {
            $outputFile = $input->getOption('output');

            if (file_exists($outputFile)) {
                $helper = $this->getHelper('question');
                $question = new ConfirmationQuestion(
                    "The output file `{$outputFile}` already exists. Overwrite it? (y/n)",
                    false
                );

                // Refuse to clobber an existing log file without confirmation.
                if (! $helper->ask($input, $output, $question)) {
                    $output->writeln('Aborting...');

                    return 0;
                }
            }

            $crawlLogger->setOutputFile($outputFile);
        }

        $clientOptions = [
            RequestOptions::TIMEOUT => (int) $input->getOption('timeout'),
            RequestOptions::VERIFY => $input->getOption('verify'),
        ];

        // Only send a User-Agent header when one was actually supplied.
        $userAgent = $input->getOption('user-agent');
        if ($userAgent !== null && $userAgent !== '') {
            $clientOptions[RequestOptions::HEADERS]['User-Agent'] = $userAgent;
        }

        // NOTE(review): extra options are merged last so they can override
        // the defaults above. CLI array options arrive as a plain list of
        // strings; the expected entry format (e.g. "key:value") is not
        // defined anywhere visible here — TODO confirm and parse into
        // proper Guzzle option keys.
        $clientOptions = array_merge($clientOptions, $input->getOption('options'));

        Crawler::create($clientOptions)
            ->setConcurrency((int) $input->getOption('concurrency'))
            ->setCrawlObserver($crawlLogger)
            ->setCrawlProfile($crawlProfile)
            ->startCrawling($baseUrl);

        return 0;
    }
}