| Conditions | 14 |
| Total Lines | 63 |
| Code Lines | 42 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 0 |
Small methods make your code easier to understand, in particular if combined with a good name. Besides, if your method is small, finding a good name is usually much easier.
For example, if you find yourself adding comments to a method's body, this is usually a good sign to extract the commented part to a new method, and use the comment as a starting point when coming up with a good name for this new method.
Commonly applied refactorings include Extract Method.
If many parameters/temporary variables are present, Replace Method with Method Object is a good candidate.
Complex classes like cmd.scanHandler often do a lot of different things. To break such a class down, we need to identify a cohesive component within that class. A common approach to find such a component is to look for fields/methods that share the same prefixes, or suffixes.
Once you have determined the fields that belong together, you can apply the Extract Class refactoring. If the component makes sense as a sub-class, Extract Subclass is also a candidate, and is often faster.
| 1 | package cmd |
||
| 40 | func scanHandler(cmd *cobra.Command, args []string) { |
||
| 41 | if scanFlags.Verbosity() { |
||
| 42 | log.Visibility.Debug = true |
||
| 43 | log.Visibility.Log = true |
||
| 44 | log.Visibility.Command = true |
||
| 45 | } |
||
| 46 | |||
| 47 | log.Log("Scanning...", fmt.Sprintf("Directory is \"%s\"", util.Abs(scanFlags.Directory()))) |
||
| 48 | |||
| 49 | var files = scan.MustDir(scanFlags.Directory(), config.App.Extensions()) |
||
| 50 | poolSize, chunkSize := util.AdjustSizes(len(files), PoolSize, MinChunkSize) |
||
| 51 | |||
| 52 | var chunks = make(chan vars.Chunk) |
||
| 53 | var scannedFiles = make(chan vars.File) |
||
| 54 | var wg sync.WaitGroup |
||
| 55 | scancmd.CreatePool( |
||
| 56 | &wg, |
||
| 57 | poolSize, |
||
| 58 | chunks, |
||
| 59 | func(files vars.Chunk) ([]byte, error) { |
||
| 60 | return etool.Read(files, config.App.Fields()) |
||
| 61 | }, |
||
| 62 | func(data []byte) { |
||
| 63 | for _, parsed := range etool.Parse(data) { |
||
| 64 | scannedFiles <- parsed |
||
| 65 | } |
||
| 66 | }, |
||
| 67 | ) |
||
| 68 | |||
| 69 | for _, chunk := range files.Split(chunkSize) { |
||
| 70 | wg.Add(1) |
||
| 71 | chunks <- chunk |
||
| 72 | } |
||
| 73 | |||
| 74 | go func() { |
||
| 75 | wg.Wait() |
||
| 76 | close(chunks) |
||
| 77 | close(scannedFiles) |
||
| 78 | }() |
||
| 79 | |||
| 80 | outputFilename := randomizeOutputFilename(scanFlags.Filename()) |
||
| 81 | |||
| 82 | headers := packHeaders(config.App.Fields()) |
||
| 83 | wr, err := writers.Get(scanFlags.Format()) |
||
| 84 | if err != nil { |
||
| 85 | logWriterFatal(err) |
||
| 86 | } |
||
| 87 | |||
| 88 | err = wr.Open(outputFilename, headers) |
||
| 89 | if err != nil { |
||
| 90 | logWriterFatal(err) |
||
| 91 | } |
||
| 92 | defer wr.Close() |
||
| 93 | |||
| 94 | for file := range scannedFiles { |
||
| 95 | file.WithRelPath(scanFlags.Directory()) |
||
| 96 | err := wr.Write(&file) |
||
| 97 | if err != nil { |
||
| 98 | log.Failure("CSV write", fmt.Sprintf("failed writing data for \"%s\" file", file.RelPath())) |
||
| 99 | } |
||
| 100 | } |
||
| 101 | |||
| 102 | log.Done("Scanning completed", fmt.Sprintf("Output file is \"%s\" file", outputFilename)) |
||
| 103 | } |
||
| 128 |