1
|
|
|
package main |
2
|
|
|
|
3
|
|
|
import ( |
4
|
|
|
"bufio" |
5
|
|
|
"flag" |
6
|
|
|
"io" |
7
|
|
|
"os" |
8
|
|
|
"os/signal" |
9
|
|
|
"runtime" |
10
|
|
|
"syscall" |
11
|
|
|
|
12
|
|
|
"github.com/imkira/go-task" |
13
|
|
|
) |
14
|
|
|
|
15
|
|
|
// displayProgress controls whether download progress output is rendered.
// NOTE(review): its consumers are not visible in this chunk — presumably the
// progress-bar code in the downloader reads it; confirm before relying on it.
var displayProgress = true
func main() { |
18
|
|
|
// var err error |
19
|
|
|
var proxy, filePath, bwLimit, resumeTask string |
20
|
|
|
|
21
|
|
|
conn := flag.Int("n", runtime.NumCPU(), "number of connections") |
22
|
|
|
skiptls := flag.Bool("skip-tls", false, "skip certificate verification for https") |
23
|
|
|
flag.StringVar(&proxy, "proxy", "", "proxy for downloading, e.g. -proxy '127.0.0.1:12345' for socks5 or -proxy 'http://proxy.com:8080' for http proxy") |
24
|
|
|
flag.StringVar(&filePath, "file", "", "path to a file that contains one URL per line") |
25
|
|
|
flag.StringVar(&bwLimit, "rate", "", "bandwidth limit during download, e.g. -rate 10kB or -rate 10MiB") |
26
|
|
|
flag.StringVar(&resumeTask, "resume", "", "resume download task with given task name (or URL)") |
27
|
|
|
|
28
|
|
|
flag.Parse() |
29
|
|
|
args := flag.Args() |
30
|
|
|
|
31
|
|
|
// If the resume flag is provided, use that path (ignoring other arguments) |
32
|
|
|
if resumeTask != "" { |
33
|
|
|
state, err := Resume(resumeTask) |
34
|
|
|
FatalCheck(err) |
35
|
|
|
Execute(state.URL, state, *conn, *skiptls, proxy, bwLimit) |
36
|
|
|
return |
37
|
|
|
} |
38
|
|
|
|
39
|
|
|
// If no resume flag, then check for positional URL or file input |
40
|
|
|
if len(args) < 1 { |
41
|
|
|
if len(filePath) < 1 { |
42
|
|
|
Errorln("A URL or input file with URLs is required") |
43
|
|
|
usage() |
44
|
|
|
os.Exit(1) |
45
|
|
|
} |
46
|
|
|
// Create a serial group for processing multiple URLs in a file. |
47
|
|
|
g1 := task.NewSerialGroup() |
48
|
|
|
file, err := os.Open(filePath) |
49
|
|
|
if err != nil { |
50
|
|
|
FatalCheck(err) |
51
|
|
|
} |
52
|
|
|
defer file.Close() |
53
|
|
|
|
54
|
|
|
reader := bufio.NewReader(file) |
55
|
|
|
for { |
56
|
|
|
line, _, err := reader.ReadLine() |
57
|
|
|
if err == io.EOF { |
58
|
|
|
break |
59
|
|
|
} |
60
|
|
|
url := string(line) |
61
|
|
|
// Add the download task for each URL |
62
|
|
|
g1.AddChild(downloadTask(url, nil, *conn, *skiptls, proxy, bwLimit)) |
63
|
|
|
} |
64
|
|
|
g1.Run(nil) |
65
|
|
|
return |
66
|
|
|
} |
67
|
|
|
|
68
|
|
|
// Otherwise, if a URL is provided as positional argument, treat it as a new download. |
69
|
|
|
downloadURL := args[0] |
70
|
|
|
// Check if a folder already exists for the task and remove if necessary. |
71
|
|
|
if ExistDir(FolderOf(downloadURL)) { |
72
|
|
|
Warnf("Downloading task already exists, remove it first \n") |
73
|
|
|
err := os.RemoveAll(FolderOf(downloadURL)) |
74
|
|
|
FatalCheck(err) |
75
|
|
|
} |
76
|
|
|
Execute(downloadURL, nil, *conn, *skiptls, proxy, bwLimit) |
77
|
|
|
} |
78
|
|
|
|
79
|
|
|
func downloadTask(url string, state *State, conn int, skiptls bool, proxy string, bwLimit string) task.Task { |
80
|
|
|
run := func(t task.Task, ctx task.Context) { |
81
|
|
|
Execute(url, state, conn, skiptls, proxy, bwLimit) |
82
|
|
|
} |
83
|
|
|
return task.NewTaskWithFunc(run) |
84
|
|
|
} |
85
|
|
|
|
86
|
|
|
// Execute configures the HTTPDownloader and uses it to download the target.
//
// url is the download target; state, when non-nil, carries the part layout of
// a previously interrupted task so the download resumes instead of starting
// fresh. conn is the number of parallel connections, skiptls disables TLS
// certificate verification, and proxy/bwLimit are forwarded to the downloader.
//
// The function blocks until the download finishes, fails, or is interrupted
// by an OS signal. On interrupt of a resumable download it persists a State
// for a later -resume; on success it joins the part files and removes the
// task folder.
func Execute(url string, state *State, conn int, skiptls bool, proxy string, bwLimit string) {
	// Capture OS interrupt signals so an in-flight download can be saved.
	signalChan := make(chan os.Signal, 1)
	signal.Notify(signalChan,
		syscall.SIGHUP,
		syscall.SIGINT,
		syscall.SIGTERM,
		syscall.SIGQUIT)

	var files = make([]string, 0)  // completed part-file paths, in arrival order
	var parts = make([]Part, 0)    // per-part progress reported by workers
	var isInterrupted = false

	// Channels are buffered to `conn` where each worker may send once without
	// blocking; errorChan/stateChan keep a single slot.
	doneChan := make(chan bool, conn)
	fileChan := make(chan string, conn)
	errorChan := make(chan error, 1)
	stateChan := make(chan Part, 1)
	interruptChan := make(chan bool, conn)

	var downloader *HTTPDownloader
	if state == nil {
		// Fresh download: probe the URL and build a new downloader.
		downloader = NewHTTPDownloader(url, conn, skiptls, proxy, bwLimit)
	} else {
		// Resume: reconstruct the downloader from the saved part layout.
		// A saved state is by definition resumable.
		downloader = &HTTPDownloader{
			url:       state.URL,
			file:      TaskFromURL(state.URL),
			par:       int64(len(state.Parts)),
			parts:     state.Parts,
			resumable: true,
		}
	}
	go downloader.Do(doneChan, fileChan, errorChan, interruptChan, stateChan)

	// Event loop: multiplex worker progress, errors, signals, and completion.
	for {
		select {
		case <-signalChan:
			// Signal all active download routines to interrupt; one token per
			// connection so every worker can observe the request.
			isInterrupted = true
			for range conn {
				interruptChan <- true
			}
		case file := <-fileChan:
			files = append(files, file)
		case err := <-errorChan:
			Errorf("%v", err)
			panic(err)
		case part := <-stateChan:
			parts = append(parts, part)
		case <-doneChan:
			if isInterrupted {
				if downloader.resumable {
					// Persist collected part progress so -resume can pick up.
					Printf("Interrupted, saving state...\n")
					s := &State{URL: url, Parts: parts}
					if err := s.Save(); err != nil {
						Errorf("%v\n", err)
					}
				} else {
					Warnf("Interrupted, but the download is not resumable. Exiting silently.\n")
				}
			} else {
				// Success: stitch the part files together, then clean up the
				// task folder.
				err := JoinFile(files, TaskFromURL(url))
				FatalCheck(err)
				err = os.RemoveAll(FolderOf(url))
				FatalCheck(err)
			}
			return
		}
	}
}
func usage() { |
158
|
|
|
Printf(`Usage: |
159
|
|
|
hget [options] URL |
160
|
|
|
hget [options] --resume=TaskName |
161
|
|
|
|
162
|
|
|
Options: |
163
|
|
|
-n int number of connections (default number of CPUs) |
164
|
|
|
-skip-tls bool skip certificate verification for https (default false) |
165
|
|
|
-proxy string proxy address (e.g., '127.0.0.1:12345' for socks5 or 'http://proxy.com:8080') |
166
|
|
|
-file string file path containing URLs (one per line) |
167
|
|
|
-rate string bandwidth limit during download (e.g., 10kB, 10MiB) |
168
|
|
|
-resume string resume a stopped download by providing its task name or URL |
169
|
|
|
`) |
170
|
|
|
} |
171
|
|
|
|