Passed
Push — master (264c22...c2f968)
by Abouzar
02:32 created

http.go (10 issues)

package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net"
	"net/http"
	stdurl "net/url"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"sync"

	"github.com/alecthomas/units"
	"github.com/fatih/color"
	"github.com/fujiwara/shapeio"
	"golang.org/x/net/proxy"
	pb "gopkg.in/cheggaaa/pb.v1"
)

var (
	tr = &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client = &http.Client{Transport: tr}
)

var (
	acceptRangeHeader   = "Accept-Ranges"
	contentLengthHeader = "Content-Length"
)

// HttpDownloader holds the required configurations
type HttpDownloader struct {
Issue: type HttpDownloader should be HTTPDownloader
	proxy     string
	rate      int64
	url       string
	file      string
	par       int64
	len       int64
	ips       []string
	skipTls   bool
Issue: struct field skipTls should be skipTLS
	parts     []Part
	resumable bool
}

func NewHttpDownloader(url string, par int, skipTls bool, proxy_server string, bwLimit string) *HttpDownloader {
Issue: exported function NewHttpDownloader should have comment or be unexported
Issue: func parameter skipTls should be skipTLS
Issue: don't use underscores in Go names; func parameter proxy_server should be proxyServer
	var resumable = true

	client := ProxyAwareHttpClient(proxy_server)

	parsed, err := stdurl.Parse(url)
	FatalCheck(err)

	ips, err := net.LookupIP(parsed.Host)
	FatalCheck(err)

	ipstr := FilterIPV4(ips)
	Printf("Resolve ip: %s\n", strings.Join(ipstr, " | "))

	req, err := http.NewRequest("GET", url, nil)
	FatalCheck(err)

	resp, err := client.Do(req)
	FatalCheck(err)

	if resp.Header.Get(acceptRangeHeader) == "" {
		Printf("Target url is not supported range download, fallback to parallel 1\n")
		//fallback to par = 1
		par = 1
	}

	//get download range
	clen := resp.Header.Get(contentLengthHeader)
	if clen == "" {
		Printf("Target url not contain Content-Length header, fallback to parallel 1\n")
		clen = "1" //set 1 because of progress bar not accept 0 length
		par = 1
		resumable = false
	}

	Printf("Start download with %d connections \n", par)

	len, err := strconv.ParseInt(clen, 10, 64)
	FatalCheck(err)

	sizeInMb := float64(len) / (1024 * 1024)

	if clen == "1" {
		Printf("Download size: not specified\n")
	} else if sizeInMb < 1024 {
		Printf("Download target size: %.1f MB\n", sizeInMb)
	} else {
		Printf("Download target size: %.1f GB\n", sizeInMb/1024)
	}

	file := filepath.Base(url)
	ret := new(HttpDownloader)
	ret.rate = 0
	bandwidthLimit, err := units.ParseStrictBytes(bwLimit)
	if err == nil {
		ret.rate = bandwidthLimit
		Printf("Download with bandwidth limit set to %s[%d]\n", bwLimit, ret.rate)
	}
	ret.url = url
	ret.file = file
	ret.par = int64(par)
	ret.len = len
	ret.ips = ipstr
	ret.skipTls = skipTls
	ret.parts = partCalculate(int64(par), len, url)
	ret.resumable = resumable
	ret.proxy = proxy_server

	return ret
}
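
Note on the bandwidth limit: NewHttpDownloader only assigns ret.rate when units.ParseStrictBytes accepts bwLimit, so an empty or malformed limit silently means "no limit". A minimal standalone sketch (a throwaway program, not part of hget) of that behaviour:

	package main

	import (
		"fmt"

		"github.com/alecthomas/units"
	)

	func main() {
		// "10MiB" is just an example input; an unparsable string such as
		// "fast" returns an error, which the constructor treats as rate 0.
		for _, s := range []string{"10MiB", "fast", ""} {
			n, err := units.ParseStrictBytes(s)
			fmt.Printf("%q -> %d, err=%v\n", s, n, err)
		}
	}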

func partCalculate(par int64, len int64, url string) []Part {
	// Pre-allocate, perf tunning
	ret := make([]Part, par)
	for j := int64(0); j < par; j++ {
		from := (len / par) * j
		var to int64
		if j < par-1 {
			to = (len/par)*(j+1) - 1
		} else {
			to = len
		}

		file := filepath.Base(url)
		folder := FolderOf(url)
		if err := MkdirIfNotExist(folder); err != nil {
			Errorf("%v", err)
			os.Exit(1)
		}

		// Padding 0 before path name as filename will be sorted as string
		fname := fmt.Sprintf("%s.part%06d", file, j)
		path := filepath.Join(folder, fname) // ~/.hget/download-file-name/part-name
		ret[j] = Part{Index: j, Url: url, Path: path, RangeFrom: from, RangeTo: to}
	}

	return ret
}
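
To make the split arithmetic concrete, here is a small standalone sketch (a separate throwaway program, not part of hget) that reproduces partCalculate's from/to computation for a 100-byte target over 3 connections; the last part keeps RangeTo == len, which Do later turns into an open-ended "bytes=66-" request:

	package main

	import "fmt"

	func main() {
		var length, par int64 = 100, 3
		for j := int64(0); j < par; j++ {
			from := (length / par) * j
			to := length // last part: partCalculate stores RangeTo = len
			if j < par-1 {
				to = (length/par)*(j+1) - 1
			}
			fmt.Printf("part %d: bytes %d-%d\n", j, from, to)
		}
		// prints:
		// part 0: bytes 0-32
		// part 1: bytes 33-65
		// part 2: bytes 66-100
	}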

func ProxyAwareHttpClient(proxy_server string) *http.Client {
Issue: exported function ProxyAwareHttpClient should have comment or be unexported
Issue: func ProxyAwareHttpClient should be ProxyAwareHTTPClient
Issue: don't use underscores in Go names; func parameter proxy_server should be proxyServer
	// setup a http client
	httpTransport := &http.Transport{}
	httpClient := &http.Client{Transport: httpTransport}
	var dialer proxy.Dialer
	dialer = proxy.Direct

	if len(proxy_server) > 0 {
		if strings.HasPrefix(proxy_server, "http") {
			proxyUrl, err := stdurl.Parse(proxy_server)
Issue: var proxyUrl should be proxyURL
			if err != nil {
				fmt.Fprintln(os.Stderr, "invalid proxy: ", err)
			}
			// create a http dialer
			dialer, err = proxy.FromURL(proxyUrl, proxy.Direct)
			if err == nil {
				httpTransport.Dial = dialer.Dial
			}
		} else {
			// create a socks5 dialer
			dialer, err := proxy.SOCKS5("tcp", proxy_server, nil, proxy.Direct)
			if err == nil {
				httpTransport.Dial = dialer.Dial
			}
		}

	}
	return httpClient
}
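
A hedged usage sketch for the helper above, assuming it sits in the same package as http.go (headThroughProxy is an invented name and the proxy address below is a placeholder): an empty proxy string yields a direct client, an http(s):// prefix is handed to proxy.FromURL, anything else non-empty is treated as a SOCKS5 host:port, and dialer errors silently fall back to a direct connection.

	// headThroughProxy issues a HEAD request through an optional proxy.
	func headThroughProxy(rawurl, proxyServer string) (*http.Response, error) {
		c := ProxyAwareHttpClient(proxyServer) // "" => direct connection
		req, err := http.NewRequest("HEAD", rawurl, nil)
		if err != nil {
			return nil, err
		}
		return c.Do(req)
	}

	// e.g. headThroughProxy("https://example.com/file.iso", "socks5.example:1080")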

func (d *HttpDownloader) Do(doneChan chan bool, fileChan chan string, errorChan chan error, interruptChan chan bool, stateSaveChan chan Part) {
Issue: exported method HttpDownloader.Do should have comment or be unexported
	var ws sync.WaitGroup
	var bars []*pb.ProgressBar
	var barpool *pb.Pool
	var err error

	for _, p := range d.parts {

		if p.RangeTo <= p.RangeFrom {
			fileChan <- p.Path
			stateSaveChan <- Part{
				Index:     p.Index,
				Url:       d.url,
				Path:      p.Path,
				RangeFrom: p.RangeFrom,
				RangeTo:   p.RangeTo,
			}

			continue
		}

		var bar *pb.ProgressBar

		if DisplayProgressBar() {
			bar = pb.New64(p.RangeTo - p.RangeFrom).SetUnits(pb.U_BYTES).Prefix(color.YellowString(fmt.Sprintf("%s-%d", d.file, p.Index)))
			bars = append(bars, bar)
		}

		ws.Add(1)
		go func(d *HttpDownloader, bar *pb.ProgressBar, part Part) {
			client := ProxyAwareHttpClient(d.proxy)
			defer ws.Done()

			var ranges string
			if part.RangeTo != d.len {
				ranges = fmt.Sprintf("bytes=%d-%d", part.RangeFrom, part.RangeTo)
			} else {
				ranges = fmt.Sprintf("bytes=%d-", part.RangeFrom) //get all
			}

			//send request
			req, err := http.NewRequest("GET", d.url, nil)
			if err != nil {
				errorChan <- err
				return
			}

			if d.par > 1 { //support range download just in case parallel factor is over 1
				req.Header.Add("Range", ranges)
				if err != nil {
					errorChan <- err
					return
				}
			}

			//write to file
			resp, err := client.Do(req)
			if err != nil {
				errorChan <- err
				return
			}
			defer resp.Body.Close()
			f, err := os.OpenFile(part.Path, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600)

			defer f.Close()
			if err != nil {
				Errorf("%v\n", err)
				errorChan <- err
				return
			}

			var writer io.Writer
			if DisplayProgressBar() {
				writer = io.MultiWriter(f, bar)
			} else {
				writer = io.MultiWriter(f)
			}

			current := int64(0)
			finishDownloadChan := make(chan bool)

			go func() {
				var written int64
				if d.rate != 0 {
					reader := shapeio.NewReader(resp.Body)
					reader.SetRateLimit(float64(d.rate))
					written, _ = io.Copy(writer, reader)
				} else {
					written, _ = io.Copy(writer, resp.Body)
				}
				current += written
				fileChan <- part.Path
				finishDownloadChan <- true
			}()

			select {
			case <-interruptChan:
				// interrupt download by forcefully close the input stream
				resp.Body.Close()
				<-finishDownloadChan
			case <-finishDownloadChan:
			}

			stateSaveChan <- Part{
				Index:     part.Index,
				Url:       d.url,
				Path:      part.Path,
				RangeFrom: current + part.RangeFrom,
				RangeTo:   part.RangeTo,
			}

			if DisplayProgressBar() {
				bar.Update()
				bar.Finish()
			}
		}(d, bar, p)
	}

	barpool, err = pb.StartPool(bars...)
	FatalCheck(err)

	ws.Wait()
	doneChan <- true
	barpool.Stop()
}
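
Do communicates only through its channels; with unbuffered channels the caller must keep receiving from fileChan and stateSaveChan while the download runs, otherwise the per-part goroutines block. A hedged sketch of one possible caller, assuming it lives in the same package as http.go (runDownload is an invented name; the real hget entry point may be wired differently):

	func runDownload(d *HttpDownloader) error {
		doneChan := make(chan bool, 1)
		fileChan := make(chan string, d.par)
		errorChan := make(chan error, 1)
		interruptChan := make(chan bool, d.par)
		stateSaveChan := make(chan Part, d.par)

		go d.Do(doneChan, fileChan, errorChan, interruptChan, stateSaveChan)

		for {
			select {
			case <-fileChan:
				// a part file finished writing
			case <-stateSaveChan:
				// resume state for one part; hget persists this elsewhere
			case err := <-errorChan:
				return err
			case <-doneChan:
				return nil
			}
		}
	}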