package cmd

import (
	"fmt"
	"net/http"
	"net/url"
	"os"
	"os/signal"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
	"github.com/stefanoj3/dirstalk/pkg/dictionary"
	"github.com/stefanoj3/dirstalk/pkg/scan"
	"github.com/stefanoj3/dirstalk/pkg/scan/client"
	"github.com/stefanoj3/dirstalk/pkg/scan/filter"
	"github.com/stefanoj3/dirstalk/pkg/scan/output"
	"github.com/stefanoj3/dirstalk/pkg/scan/producer"
	"github.com/stefanoj3/dirstalk/pkg/scan/summarizer"
)
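
// NewScanCommand builds the cobra command for the `scan` subcommand and registers
// the flags that control the dictionary, the HTTP client, and the result output.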
func NewScanCommand(logger *logrus.Logger) (*cobra.Command, error) {
	cmd := &cobra.Command{
		Use:   "scan [url]",
		Short: "Scan the given URL",
		RunE:  buildScanFunction(logger),
	}

	cmd.Flags().StringP(
		flagDictionary,
		flagDictionaryShort,
		"",
		"dictionary to use for the scan (path to local file or remote url)",
	)
	err := cmd.MarkFlagFilename(flagDictionary)
	if err != nil {
		return nil, err
	}

	err = cmd.MarkFlagRequired(flagDictionary)
	if err != nil {
		return nil, err
	}

	cmd.Flags().StringSlice(
		flagHTTPMethods,
		[]string{"GET"},
		"comma separated list of http methods to use; eg: GET,POST,PUT",
	)

	cmd.Flags().IntSlice(
		flagHTTPStatusesToIgnore,
		[]int{http.StatusNotFound},
		"comma separated list of http statuses to ignore when showing and processing results; eg: 404,301",
	)

	cmd.Flags().IntP(
		flagThreads,
		flagThreadsShort,
		3,
		"amount of threads for concurrent requests",
	)

	cmd.Flags().IntP(
		flagHTTPTimeout,
		"",
		5000,
		"timeout in milliseconds",
	)

	cmd.Flags().BoolP(
		flagHTTPCacheRequests,
		"",
		true,
		"cache requests to avoid performing the same request multiple times within the same scan (e.g. if the "+
			"server replies with the same redirect location multiple times, dirstalk will follow it only once)",
	)

	cmd.Flags().IntP(
		flagScanDepth,
		"",
		3,
		"scan depth",
	)

	cmd.Flags().StringP(
		flagSocks5Host,
		"",
		"",
		"socks5 host to use",
	)

	cmd.Flags().StringP(
		flagUserAgent,
		"",
		"",
		"user agent to use for http requests",
	)

	cmd.Flags().BoolP(
		flagCookieJar,
		"",
		false,
		"enables the use of a cookie jar: it will retain any cookies sent "+
			"from the server and send them with the following requests",
	)

	cmd.Flags().StringArray(
		flagCookie,
		[]string{},
		"cookie to add to each request; eg name=value (can be specified multiple times)",
	)

	cmd.Flags().StringArray(
		flagHeader,
		[]string{},
		"header to add to each request; eg name=value (can be specified multiple times)",
	)

	cmd.Flags().String(
		flagResultOutput,
		"",
		"path where to store result output",
	)

	return cmd, nil
}
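
// buildScanFunction returns the RunE handler for the scan command: it parses the
// target URL from the positional arguments, reads the scan configuration from the
// command flags, and starts the scan.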
func buildScanFunction(logger *logrus.Logger) func(cmd *cobra.Command, args []string) error {
	f := func(cmd *cobra.Command, args []string) error {
		u, err := getURL(args)
		if err != nil {
			return err
		}

		cnf, err := scanConfigFromCmd(cmd)
		if err != nil {
			return errors.Wrap(err, "failed to build config")
		}

		return startScan(logger, cnf, u)
	}

	return f
}
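
// getURL ensures a target URL was provided and parses it.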
func getURL(args []string) (*url.URL, error) {
	if len(args) == 0 {
		return nil, errors.New("no URL provided")
	}

	arg := args[0]

	u, err := url.ParseRequestURI(arg)
	if err != nil {
		return nil, errors.Wrap(err, "the first argument must be a valid url")
	}

	return u, nil
}

// startScan is a convenience method that wires together all the dependencies needed to start a scan
func startScan(logger *logrus.Logger, cnf *scan.Config, u *url.URL) error {
	c, err := client.NewClientFromConfig(
		cnf.TimeoutInMilliseconds,
		cnf.Socks5Url,
		cnf.UserAgent,
		cnf.UseCookieJar,
		cnf.Cookies,
		cnf.Headers,
		cnf.CacheRequests,
		u,
	)
	if err != nil {
		return errors.Wrap(err, "failed to build client")
	}

	dict, err := dictionary.NewDictionaryFrom(cnf.DictionaryPath, c)
	if err != nil {
		return errors.Wrap(err, "failed to build dictionary")
	}

	targetProducer := producer.NewDictionaryProducer(cnf.HTTPMethods, dict, cnf.ScanDepth)
	reproducer := producer.NewReProducer(targetProducer)

	resultFilter := filter.NewHTTPStatusResultFilter(cnf.HTTPStatusesToIgnore)

	s := scan.NewScanner(
		c,
		targetProducer,
		reproducer,
		resultFilter,
		logger,
	)

	logger.WithFields(logrus.Fields{
		"url":               u.String(),
		"threads":           cnf.Threads,
		"dictionary-length": len(dict),
		"scan-depth":        cnf.ScanDepth,
		"timeout":           cnf.TimeoutInMilliseconds,
		"socks5":            cnf.Socks5Url,
		"cookies":           stringifyCookies(cnf.Cookies),
		"cookie-jar":        cnf.UseCookieJar,
		"headers":           stringifyHeaders(cnf.Headers),
		"user-agent":        cnf.UserAgent,
	}).Info("Starting scan")

	resultSummarizer := summarizer.NewResultSummarizer(logger)
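
	// Trap SIGINT so that an interrupted scan can still print a summary and close the output before exiting.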
	osSigint := make(chan os.Signal, 1)
	signal.Notify(osSigint, os.Interrupt)

	outputSaver, err := newOutputSaver(cnf.Out)
	if err != nil {
		return errors.Wrap(err, "failed to create output saver")
	}
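
	// finishFunc runs both on normal completion and on SIGINT: it prints the results
	// summary and closes the output saver.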
	finishFunc := func() {
		resultSummarizer.Summarize()
		err := outputSaver.Close()
		if err != nil {
			logger.WithError(err).Error("failed to close output file")
		}
		logger.Info("Finished scan")
	}
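
	// Consume results as they are produced, persisting each one and checking for an
	// interrupt before processing the next result.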
	for result := range s.Scan(u, cnf.Threads) {
		select {
		case <-osSigint:
			logger.Info("Received sigint, terminating...")
			finishFunc()
			return nil
		default:
			resultSummarizer.Add(result)
			err := outputSaver.Save(result)
			if err != nil {
				return errors.Wrap(err, "failed to add output to file")
			}
		}
	}

	finishFunc()

	return nil
}
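
// newOutputSaver returns a no-op saver when no output path is configured, otherwise
// a saver that writes the results to the given path.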
func newOutputSaver(path string) (OutputSaver, error) {
	if path == "" {
		return output.NewNullSaver(), nil
	}

	return output.NewFileSaver(path)
}
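
// stringifyCookies renders the configured cookies in a compact form for logging.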
func stringifyCookies(cookies []*http.Cookie) string {
	result := ""

	for _, cookie := range cookies {
		result += fmt.Sprintf("{%s=%s}", cookie.Name, cookie.Value)
	}

	return result
}
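
// stringifyHeaders renders the configured headers in a compact form for logging.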
func stringifyHeaders(headers map[string]string) string {
	result := ""

	for name, value := range headers {
		result += fmt.Sprintf("{%s:%s}", name, value)
	}

	return result
}