package cmd

import (
	"fmt"
	"os"
	"path/filepath"
	"sync"

	"github.com/spf13/cobra"
	"github.com/vvval/go-metadata-scanner/cmd/scancmd"
	"github.com/vvval/go-metadata-scanner/cmd/scancmd/writers"
	"github.com/vvval/go-metadata-scanner/config"
	"github.com/vvval/go-metadata-scanner/etool"
	"github.com/vvval/go-metadata-scanner/util"
	"github.com/vvval/go-metadata-scanner/util/log"
	"github.com/vvval/go-metadata-scanner/util/rand"
	"github.com/vvval/go-metadata-scanner/util/scan"
	"github.com/vvval/go-metadata-scanner/vars"
)

var (
	scanFlags scancmd.Flags

	// PoolSize is the initial number of concurrent metadata readers.
	PoolSize = 10

	// MinChunkSize is the lower bound used when splitting files into chunks.
	MinChunkSize = 5
)

func init() {
	// cmd represents the scan command
	var cmd = &cobra.Command{
		Use:   "scan",
		Short: "Scan a folder and write metadata into the output file.",
		Long: `Scan a folder and write metadata into the output file.
By default the output file is a "csv" file.`,
		Run: func(cmd *cobra.Command, args []string) {
			err := scanHandler(scanFlags, config.App, PoolSize, MinChunkSize)
			if err != nil {
				log.Failure("Output writer", err.Error())
				os.Exit(1)
			}
		},
	}

	rootCmd.AddCommand(cmd)
	scanFlags.Fill(cmd)
}

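// scanHandler scans the target directory, splits the discovered files into
// chunks processed by a worker pool, and writes the parsed metadata of every
// scanned file into the selected output writer.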
func scanHandler(flags scancmd.Flags, appConfig config.AppConfig, initialPoolSize, minChunkSize int) error {
	if flags.Verbosity() {
		log.Visibility.Debug = true
		log.Visibility.Log = true
	}

	log.Log("Scanning...", fmt.Sprintf("Directory is \"%s\"", util.Abs(flags.Directory())))

	var files = scan.MustDir(flags.Directory(), appConfig.Extensions())
	poolSize, chunkSize := util.AdjustSizes(len(files), initialPoolSize, minChunkSize)

	var chunks = make(chan vars.Chunk)
	var scannedFiles = make(chan vars.File)
	var wg sync.WaitGroup

	// Workers read metadata for each incoming chunk and stream the parsed
	// files into the scannedFiles channel.
	scancmd.CreatePool(
		&wg,
		poolSize,
		chunks,
		func(files vars.Chunk) ([]byte, error) {
			return etool.Read(files, appConfig.Fields())
		},
		func(data []byte) {
			for _, parsed := range etool.Parse(data) {
				scannedFiles <- parsed
			}
		},
	)

	for _, chunk := range files.Split(chunkSize) {
		wg.Add(1)
		chunks <- chunk
	}

	// Close both channels once every dispatched chunk has been processed.
	go func() {
		wg.Wait()
		close(chunks)
		close(scannedFiles)
	}()

	outputFilename := randomizeOutputFilename(flags.Filename())

	headers := packHeaders(appConfig.Fields())
	wr, err := writers.Get(flags.Format())
	if err != nil {
		return err
	}

	err = wr.Open(outputFilename, headers)
	if err != nil {
		return err
	}
	defer wr.Close()

	for file := range scannedFiles {
		file.WithRelPath(flags.Directory())
		err := wr.Write(&file)
		if err != nil {
			log.Failure("CSV write", fmt.Sprintf("failed writing data for \"%s\" file", file.RelPath()))
		}
	}

	log.Done("Scanning completed", fmt.Sprintf("Output file is \"%s\"", outputFilename))

	return nil
}

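// randomizeOutputFilename injects a random suffix between the file name and
// its extension (for example, "out.csv" becomes "out-<random>.csv") so that
// consecutive runs do not overwrite each other's output.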
func randomizeOutputFilename(path string) string {
	ext := filepath.Ext(path)
	dir := filepath.Dir(path)
	base := filepath.Base(path)
	hash := rand.Strings(10)

	return filepath.Join(dir, base[0:len(base)-len(ext)]+"-"+hash+ext)
}

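// packHeaders builds the header row for the output file: the "Filename"
// column followed by the configured metadata fields.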
func packHeaders(fields []string) []string {
	headers := []string{"Filename"}
	headers = append(headers, fields...)

	return headers
}