1
|
|
|
package cmd |
2
|
|
|
|
3
|
|
|
import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"os"
	"os/signal"
	"sort"
	"strings"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
	"github.com/stefanoj3/dirstalk/pkg/cmd/termination"
	"github.com/stefanoj3/dirstalk/pkg/common"
	"github.com/stefanoj3/dirstalk/pkg/dictionary"
	"github.com/stefanoj3/dirstalk/pkg/scan"
	"github.com/stefanoj3/dirstalk/pkg/scan/client"
	"github.com/stefanoj3/dirstalk/pkg/scan/filter"
	"github.com/stefanoj3/dirstalk/pkg/scan/output"
	"github.com/stefanoj3/dirstalk/pkg/scan/producer"
	"github.com/stefanoj3/dirstalk/pkg/scan/summarizer"
	"github.com/stefanoj3/dirstalk/pkg/scan/summarizer/tree"
)
25
|
|
|
|
26
|
|
|
func NewScanCommand(logger *logrus.Logger) *cobra.Command { |
27
|
|
|
cmd := &cobra.Command{ |
28
|
|
|
Use: "scan [url]", |
29
|
|
|
Short: "Scan the given URL", |
30
|
|
|
RunE: buildScanFunction(logger), |
31
|
|
|
} |
32
|
|
|
|
33
|
|
|
cmd.Flags().StringP( |
34
|
|
|
flagScanDictionary, |
35
|
|
|
flagScanDictionaryShort, |
36
|
|
|
"", |
37
|
|
|
"dictionary to use for the scan (path to local file or remote url)", |
38
|
|
|
) |
39
|
|
|
common.Must(cmd.MarkFlagFilename(flagScanDictionary)) |
40
|
|
|
common.Must(cmd.MarkFlagRequired(flagScanDictionary)) |
41
|
|
|
|
42
|
|
|
cmd.Flags().IntP( |
43
|
|
|
flagScanDictionaryGetTimeout, |
44
|
|
|
"", |
45
|
|
|
50000, |
46
|
|
|
"timeout in milliseconds (used when fetching remote dictionary)", |
47
|
|
|
) |
48
|
|
|
|
49
|
|
|
cmd.Flags().StringSlice( |
50
|
|
|
flagScanHTTPMethods, |
51
|
|
|
[]string{"GET"}, |
52
|
|
|
"comma separated list of http methods to use; eg: GET,POST,PUT", |
53
|
|
|
) |
54
|
|
|
|
55
|
|
|
cmd.Flags().IntSlice( |
56
|
|
|
flagScanHTTPStatusesToIgnore, |
57
|
|
|
[]int{http.StatusNotFound}, |
58
|
|
|
"comma separated list of http statuses to ignore when showing and processing results; eg: 404,301", |
59
|
|
|
) |
60
|
|
|
|
61
|
|
|
cmd.Flags().IntP( |
62
|
|
|
flagScanThreads, |
63
|
|
|
flagScanThreadsShort, |
64
|
|
|
3, |
65
|
|
|
"amount of threads for concurrent requests", |
66
|
|
|
) |
67
|
|
|
|
68
|
|
|
cmd.Flags().IntP( |
69
|
|
|
flagScanHTTPTimeout, |
70
|
|
|
"", |
71
|
|
|
5000, |
72
|
|
|
"timeout in milliseconds", |
73
|
|
|
) |
74
|
|
|
|
75
|
|
|
cmd.Flags().BoolP( |
76
|
|
|
flagScanHTTPCacheRequests, |
77
|
|
|
"", |
78
|
|
|
true, |
79
|
|
|
"cache requests to avoid performing the same request multiple times within the same scan (EG if the "+ |
80
|
|
|
"server reply with the same redirect location multiple times, dirstalk will follow it only once)", |
81
|
|
|
) |
82
|
|
|
|
83
|
|
|
cmd.Flags().IntP( |
84
|
|
|
flagScanScanDepth, |
85
|
|
|
"", |
86
|
|
|
3, |
87
|
|
|
"scan depth", |
88
|
|
|
) |
89
|
|
|
|
90
|
|
|
cmd.Flags().StringP( |
91
|
|
|
flagScanSocks5Host, |
92
|
|
|
"", |
93
|
|
|
"", |
94
|
|
|
"socks5 host to use", |
95
|
|
|
) |
96
|
|
|
|
97
|
|
|
cmd.Flags().StringP( |
98
|
|
|
flagScanUserAgent, |
99
|
|
|
"", |
100
|
|
|
"", |
101
|
|
|
"user agent to use for http requests", |
102
|
|
|
) |
103
|
|
|
|
104
|
|
|
cmd.Flags().BoolP( |
105
|
|
|
flagScanCookieJar, |
106
|
|
|
"", |
107
|
|
|
false, |
108
|
|
|
"enables the use of a cookie jar: it will retain any cookie sent "+ |
109
|
|
|
"from the server and send them for the following requests", |
110
|
|
|
) |
111
|
|
|
|
112
|
|
|
cmd.Flags().StringArray( |
113
|
|
|
flagScanCookie, |
114
|
|
|
[]string{}, |
115
|
|
|
"cookie to add to each request; eg name=value (can be specified multiple times)", |
116
|
|
|
) |
117
|
|
|
|
118
|
|
|
cmd.Flags().StringArray( |
119
|
|
|
flagScanHeader, |
120
|
|
|
[]string{}, |
121
|
|
|
"header to add to each request; eg name=value (can be specified multiple times)", |
122
|
|
|
) |
123
|
|
|
|
124
|
|
|
cmd.Flags().String( |
125
|
|
|
flagScanResultOutput, |
126
|
|
|
"", |
127
|
|
|
"path where to store result output", |
128
|
|
|
) |
129
|
|
|
|
130
|
|
|
cmd.Flags().Bool( |
131
|
|
|
flagShouldSkipSSLCertificatesValidation, |
132
|
|
|
false, |
133
|
|
|
"to skip checking the validity of SSL certificates", |
134
|
|
|
) |
135
|
|
|
|
136
|
|
|
cmd.Flags().Bool( |
137
|
|
|
flagIgnore20xWithEmptyBody, |
138
|
|
|
false, |
139
|
|
|
"ignore HTTP 20x responses with empty body", |
140
|
|
|
) |
141
|
|
|
|
142
|
|
|
return cmd |
143
|
|
|
} |
144
|
|
|
|
145
|
|
|
func buildScanFunction(logger *logrus.Logger) func(cmd *cobra.Command, args []string) error { |
146
|
|
|
f := func(cmd *cobra.Command, args []string) error { |
147
|
|
|
u, err := getURL(args) |
148
|
|
|
if err != nil { |
149
|
|
|
return err |
150
|
|
|
} |
151
|
|
|
|
152
|
|
|
cnf, err := scanConfigFromCmd(cmd) |
153
|
|
|
if err != nil { |
154
|
|
|
return errors.Wrap(err, "failed to build config") |
155
|
|
|
} |
156
|
|
|
|
157
|
|
|
return startScan(logger, cnf, u) |
158
|
|
|
} |
159
|
|
|
|
160
|
|
|
return f |
161
|
|
|
} |
162
|
|
|
|
163
|
|
|
func getURL(args []string) (*url.URL, error) { |
164
|
|
|
if len(args) == 0 { |
165
|
|
|
return nil, errors.New("no URL provided") |
166
|
|
|
} |
167
|
|
|
|
168
|
|
|
arg := args[0] |
169
|
|
|
|
170
|
|
|
u, err := url.ParseRequestURI(arg) |
171
|
|
|
if err != nil { |
172
|
|
|
return nil, errors.Wrap(err, "the first argument must be a valid url") |
173
|
|
|
} |
174
|
|
|
|
175
|
|
|
return u, nil |
176
|
|
|
} |
177
|
|
|
|
178
|
|
|
// startScan is a convenience method that wires together all the dependencies needed to start a scan. |
179
|
|
|
func startScan(logger *logrus.Logger, cnf *scan.Config, u *url.URL) error { |
180
|
|
|
dict, err := buildDictionary(cnf, u) |
181
|
|
|
if err != nil { |
182
|
|
|
return err |
183
|
|
|
} |
184
|
|
|
|
185
|
|
|
s, err := buildScanner(cnf, dict, u, logger) |
186
|
|
|
if err != nil { |
187
|
|
|
return err |
188
|
|
|
} |
189
|
|
|
|
190
|
|
|
logger.WithFields(logrus.Fields{ |
191
|
|
|
"url": u.String(), |
192
|
|
|
"threads": cnf.Threads, |
193
|
|
|
"dictionary-length": len(dict), |
194
|
|
|
"scan-depth": cnf.ScanDepth, |
195
|
|
|
"timeout": cnf.TimeoutInMilliseconds, |
196
|
|
|
"socks5": cnf.Socks5Url, |
197
|
|
|
"cookies": stringifyCookies(cnf.Cookies), |
198
|
|
|
"cookie-jar": cnf.UseCookieJar, |
199
|
|
|
"headers": stringifyHeaders(cnf.Headers), |
200
|
|
|
"user-agent": cnf.UserAgent, |
201
|
|
|
}).Info("Starting scan") |
202
|
|
|
|
203
|
|
|
resultSummarizer := summarizer.NewResultSummarizer(tree.NewResultTreeProducer(), logger) |
204
|
|
|
|
205
|
|
|
osSigint := make(chan os.Signal, 1) |
206
|
|
|
signal.Notify(osSigint, os.Interrupt) |
207
|
|
|
|
208
|
|
|
outputSaver, err := newOutputSaver(cnf.Out) |
209
|
|
|
if err != nil { |
210
|
|
|
return errors.Wrap(err, "failed to create output saver") |
211
|
|
|
} |
212
|
|
|
|
213
|
|
|
defer func() { |
214
|
|
|
resultSummarizer.Summarize() |
215
|
|
|
|
216
|
|
|
err := outputSaver.Close() |
217
|
|
|
if err != nil { |
218
|
|
|
logger.WithError(err).Error("failed to close output file") |
219
|
|
|
} |
220
|
|
|
|
221
|
|
|
logger.Info("Finished scan") |
222
|
|
|
}() |
223
|
|
|
|
224
|
|
|
ctx, cancellationFunc := context.WithCancel(context.Background()) |
225
|
|
|
defer cancellationFunc() |
226
|
|
|
|
227
|
|
|
resultsChannel := s.Scan(ctx, u, cnf.Threads) |
228
|
|
|
|
229
|
|
|
terminationHandler := termination.NewTerminationHandler(2) |
230
|
|
|
|
231
|
|
|
for { |
232
|
|
|
select { |
233
|
|
|
case <-osSigint: |
234
|
|
|
terminationHandler.SignalTermination() |
235
|
|
|
cancellationFunc() |
236
|
|
|
|
237
|
|
|
if terminationHandler.ShouldTerminate() { |
238
|
|
|
logger.Info("Received sigint, terminating...") |
239
|
|
|
|
240
|
|
|
return nil |
241
|
|
|
} |
242
|
|
|
|
243
|
|
|
logger.Info( |
244
|
|
|
"Received sigint, trying to shutdown gracefully, another SIGNINT will terminate the application", |
245
|
|
|
) |
246
|
|
|
case result, ok := <-resultsChannel: |
247
|
|
|
if !ok { |
248
|
|
|
logger.Debug("result channel is being closed, scan should be complete") |
249
|
|
|
|
250
|
|
|
return nil |
251
|
|
|
} |
252
|
|
|
|
253
|
|
|
resultSummarizer.Add(result) |
254
|
|
|
|
255
|
|
|
if err := outputSaver.Save(result); err != nil { |
256
|
|
|
return errors.Wrap(err, "failed to add output to file") |
257
|
|
|
} |
258
|
|
|
} |
259
|
|
|
} |
260
|
|
|
} |
261
|
|
|
|
262
|
|
|
func buildScanner(cnf *scan.Config, dict []string, u *url.URL, logger *logrus.Logger) (*scan.Scanner, error) { |
263
|
|
|
targetProducer := producer.NewDictionaryProducer(cnf.HTTPMethods, dict, cnf.ScanDepth) |
264
|
|
|
reproducer := producer.NewReProducer(targetProducer) |
265
|
|
|
|
266
|
|
|
resultFilter := filter.NewHTTPStatusResultFilter(cnf.HTTPStatusesToIgnore, cnf.IgnoreEmpty20xResponses) |
267
|
|
|
|
268
|
|
|
scannerClient, err := buildScannerClient(cnf, u) |
269
|
|
|
if err != nil { |
270
|
|
|
return nil, err |
271
|
|
|
} |
272
|
|
|
|
273
|
|
|
s := scan.NewScanner( |
274
|
|
|
scannerClient, |
275
|
|
|
targetProducer, |
276
|
|
|
reproducer, |
277
|
|
|
resultFilter, |
278
|
|
|
logger, |
279
|
|
|
) |
280
|
|
|
|
281
|
|
|
return s, nil |
282
|
|
|
} |
283
|
|
|
|
284
|
|
|
func buildDictionary(cnf *scan.Config, u *url.URL) ([]string, error) { |
285
|
|
|
c, err := buildDictionaryClient(cnf, u) |
286
|
|
|
if err != nil { |
287
|
|
|
return nil, err |
288
|
|
|
} |
289
|
|
|
|
290
|
|
|
dict, err := dictionary.NewDictionaryFrom(cnf.DictionaryPath, c) |
291
|
|
|
if err != nil { |
292
|
|
|
return nil, errors.Wrap(err, "failed to build dictionary") |
293
|
|
|
} |
294
|
|
|
|
295
|
|
|
return dict, nil |
296
|
|
|
} |
297
|
|
|
|
298
|
|
|
func buildScannerClient(cnf *scan.Config, u *url.URL) (*http.Client, error) { |
299
|
|
|
c, err := client.NewClientFromConfig( |
300
|
|
|
cnf.TimeoutInMilliseconds, |
301
|
|
|
cnf.Socks5Url, |
302
|
|
|
cnf.UserAgent, |
303
|
|
|
cnf.UseCookieJar, |
304
|
|
|
cnf.Cookies, |
305
|
|
|
cnf.Headers, |
306
|
|
|
cnf.CacheRequests, |
307
|
|
|
cnf.ShouldSkipSSLCertificatesValidation, |
308
|
|
|
u, |
309
|
|
|
) |
310
|
|
|
if err != nil { |
311
|
|
|
return nil, errors.Wrap(err, "failed to build scanner client") |
312
|
|
|
} |
313
|
|
|
|
314
|
|
|
return c, nil |
315
|
|
|
} |
316
|
|
|
|
317
|
|
|
func buildDictionaryClient(cnf *scan.Config, u *url.URL) (*http.Client, error) { |
318
|
|
|
c, err := client.NewClientFromConfig( |
319
|
|
|
cnf.DictionaryTimeoutInMilliseconds, |
320
|
|
|
cnf.Socks5Url, |
321
|
|
|
cnf.UserAgent, |
322
|
|
|
cnf.UseCookieJar, |
323
|
|
|
cnf.Cookies, |
324
|
|
|
cnf.Headers, |
325
|
|
|
cnf.CacheRequests, |
326
|
|
|
cnf.ShouldSkipSSLCertificatesValidation, |
327
|
|
|
u, |
328
|
|
|
) |
329
|
|
|
if err != nil { |
330
|
|
|
return nil, errors.Wrap(err, "failed to build dictionary client") |
331
|
|
|
} |
332
|
|
|
|
333
|
|
|
return c, nil |
334
|
|
|
} |
335
|
|
|
|
336
|
|
|
func newOutputSaver(path string) (OutputSaver, error) { |
337
|
|
|
if path == "" { |
338
|
|
|
return output.NewNullSaver(), nil |
339
|
|
|
} |
340
|
|
|
|
341
|
|
|
return output.NewFileSaver(path) |
342
|
|
|
} |
343
|
|
|
|
344
|
|
|
// stringifyCookies renders the configured cookies as a compact
// "{name=value}{name=value}..." string for logging.
//
// A strings.Builder is used instead of `result +=` to avoid the quadratic
// cost of repeated string concatenation.
func stringifyCookies(cookies []*http.Cookie) string {
	var b strings.Builder

	for _, cookie := range cookies {
		fmt.Fprintf(&b, "{%s=%s}", cookie.Name, cookie.Value)
	}

	return b.String()
}
353
|
|
|
|
354
|
|
|
// stringifyHeaders renders the configured headers as a compact
// "{name:value}{name:value}..." string for logging.
//
// Keys are sorted before rendering: Go map iteration order is randomized,
// so without sorting the logged value would differ between runs of the same
// scan. A strings.Builder avoids quadratic string concatenation.
func stringifyHeaders(headers map[string]string) string {
	names := make([]string, 0, len(headers))
	for name := range headers {
		names = append(names, name)
	}

	sort.Strings(names)

	var b strings.Builder
	for _, name := range names {
		fmt.Fprintf(&b, "{%s:%s}", name, headers[name])
	}

	return b.String()
}
363
|
|
|
|