diff --git a/.travis.yml b/.travis.yml
index 9502fea..12defce 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,8 @@
 sudo: false
 language: go
 go:
-  - 1.6.2
+  - 1.15.1
  - tip
 script:
  - make clean install
-  - go test
\ No newline at end of file
+  - go test
diff --git a/Makefile b/Makefile
index 9f7afc9..44e4854 100644
--- a/Makefile
+++ b/Makefile
@@ -8,11 +8,19 @@ deps:
 	go get -d github.com/fatih/color
 	go get -d gopkg.in/cheggaaa/pb.v1
 	go get -d github.com/mattn/go-isatty
+	go get -d github.com/imkira/go-task
+	go get -d github.com/fujiwara/shapeio
+	go get -d github.com/alecthomas/units
 
 clean:
 	@echo "====> Remove installed binary"
 	rm -f bin/hget
 
-install: deps
+build: deps
 	@echo "====> Build hget in ./bin "
 	go build -ldflags "-X main.GitCommit=\"$(COMMIT)\"" -o bin/hget
+
+install: build
+	@echo "====> Installing hget in /usr/local/bin/hget"
+	chmod +x ./bin/hget
+	sudo mv ./bin/hget /usr/local/bin/hget
diff --git a/README.md b/README.md
index 3bb3449..eb87982 100644
--- a/README.md
+++ b/README.md
@@ -1,28 +1,58 @@
-# hget
-This project is my personal project to learn golang to build something useful.
+[![Build Status](https://travis-ci.com/abzcoding/hget.svg?branch=master)](https://travis-ci.com/abzcoding/hget)
+[![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/abzcoding/hget/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/abzcoding/hget/?branch=master)
+[![Maintainability](https://api.codeclimate.com/v1/badges/936e2aacab5946478295/maintainability)](https://codeclimate.com/github/abzcoding/hget/maintainability)
+[![Codebeat](https://codebeat.co/badges/ea357ae8-4d84-4599-bff7-cffc4f28fd67)](https://codebeat.co/projects/github-com-abzcoding-hget-master)
 
+# hget
 ![](https://i.gyazo.com/641166ab79e196e35d1a0ef3f9befd80.png)
 
+### Features
+- Fast (multithreading & stuff)
+- Ability to interrupt/resume (task management)
+- Support for proxies (socks5 or http)
+- Bandwidth limiting
+- You can give it a file that contains a list of URLs to download
+### Install
 
-**Build Status**: [![Build Status](https://travis-ci.org/huydx/hget.svg?branch=master)](https://travis-ci.org/huydx/hget)
-
-## Install
-
-```
-$ go get -d github.com/huydx/hget
-$ cd $GOPATH/src/github.com/huydx/hget
+```bash
+$ go get -d github.com/abzcoding/hget
+$ cd $GOPATH/src/github.com/abzcoding/hget
 $ make clean install
 ```
 
 Binary file will be built at ./bin/hget, you can copy to /usr/bin or /usr/local/bin and even `alias wget hget` to replace wget totally :P
 
-## Usage
+### Usage
+
+```bash
+hget [-n parallel] [-skip-tls false] [-rate bwRate] [-proxy proxy_server] [-file filename] [URL] # to download url, with n connections, and not skip tls certificate
+hget tasks # get interrupted tasks
+hget resume [TaskName | URL] # to resume task
+hget -proxy "127.0.0.1:12345" URL # to download using socks5 proxy
+hget -proxy "http://sample-proxy.com:8080" URL # to download using http proxy
+hget -file sample.txt # to download a list of urls
+hget -n 4 -rate 100KB URL # to download using 4 threads & limited to 100KB per second
+```
+### Help
 
 ```
-hget [Url] [-n parallel] [-skip-tls false] //to download url, with n connections, and not skip tls certificate
-hget tasks //get interrupted tasks
-hget resume [TaskName | URL] //to resume task
+[I] ➜ hget -h
+Usage of hget:
+  -file string
+        filepath that contains links in each line
+  -n int
+        connection (default 16)
+  -proxy string
+        proxy for downloading, ex
+        	-proxy '127.0.0.1:12345' for socks5 proxy
+        	-proxy 'http://proxy.com:8080' for http proxy
+  -rate string
+        bandwidth limit to use while downloading, ex
+        	-rate 10kB
+        	-rate 10MiB
+  -skip-tls
+        skip verify certificate for https (default true)
 ```
 
 To interrupt any on-downloading process, just ctrl-c or ctrl-d at the middle of the download, hget will safely save your data and you will be able to resume later
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..2bb2366
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,15 @@
+module github.com/abzcoding/hget
+
+go 1.16
+
+require (
+	github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15
+	github.com/fatih/color v1.12.0
+	github.com/fujiwara/shapeio v1.0.0
+	github.com/imkira/go-task v1.0.0
+	github.com/mattn/go-colorable v0.1.8
+	github.com/mattn/go-isatty v0.0.13
+	github.com/mattn/go-runewidth v0.0.13 // indirect
+	golang.org/x/net v0.0.0-20210614182718-04defd469f4e
+	gopkg.in/cheggaaa/pb.v1 v1.0.28
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..7fa56e5
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,43 @@
+github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 h1:AUNCr9CiJuwrRYS3XieqF+Z9B9gNxo/eANAJCF2eiN4=
+github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
+github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
+github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
+github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc=
+github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM=
+github.com/fujiwara/shapeio v1.0.0 h1:xG5D9oNqCSUUbryZ/jQV3cqe1v2suEjwPIcEg1gKM8M=
+github.com/fujiwara/shapeio v1.0.0/go.mod h1:LmEmu6L/8jetyj1oewewFb7bZCNRwE7wLCUNzDLaLVA=
+github.com/imkira/go-task v1.0.0 h1:r8RN5nLcmVpYf/UB28d1w4XApVxDntWLAsiExNIptsY=
+github.com/imkira/go-task v1.0.0/go.mod h1:xU9xcPxKeBOQTwx8ILmT8xLxrm/SFmyBhPO8SlCRyRI=
+github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
+github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.13 h1:qdl+GuBjcsKKDco5BsxPJlId98mSWNKqYA+Co0SC1yA=
+github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
+github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da h1:b3NXsE2LusjYGGjL5bxEVZZORm/YEFFrWFjR8eFrw/c=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE=
+golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/cheggaaa/pb.v1 v1.0.28 h1:n1tBJnnK2r7g9OW2btFH91V92STTUevLXYFb8gy9EMk=
+gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
+gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/http.go b/http.go
index 642b027..b8c6517 100644
--- a/http.go
+++ b/http.go
@@ -3,8 +3,6 @@ package main
 import (
 	"crypto/tls"
 	"fmt"
-	"github.com/fatih/color"
-	pb "gopkg.in/cheggaaa/pb.v1"
 	"io"
 	"net"
 	"net/http"
@@ -14,6 +12,12 @@ import (
 	"strconv"
 	"strings"
 	"sync"
+
+	"github.com/alecthomas/units"
+	"github.com/fatih/color"
+	"github.com/fujiwara/shapeio"
+	"golang.org/x/net/proxy"
+	pb "gopkg.in/cheggaaa/pb.v1"
 )
 
 var (
@@ -28,19 +32,24 @@ var (
 	contentLengthHeader = "Content-Length"
 )
 
-type HttpDownloader struct {
+// HTTPDownloader holds the required configurations
+type HTTPDownloader struct {
+	proxy     string
+	rate      int64
 	url       string
 	file      string
 	par       int64
 	len       int64
 	ips       []string
-	skipTls   bool
+	skipTLS   bool
 	parts     []Part
 	resumable bool
 }
 
-func NewHttpDownloader(url string, par int, skipTls bool) *HttpDownloader {
+// NewHTTPDownloader returns an HTTPDownloader built from the given configuration.
+func NewHTTPDownloader(url string, par int, skipTLS bool, proxyServer string, bwLimit string) *HTTPDownloader {
 	var resumable = true
+	client := ProxyAwareHTTPClient(proxyServer)
 	parsed, err := stdurl.Parse(url)
 	FatalCheck(err)
@@ -59,7 +68,6 @@
 	if resp.Header.Get(acceptRangeHeader) == "" {
 		Printf("Target url is not supported range download, fallback to parallel 1\n")
-		//fallback to par = 1
 		par = 1
 	}
@@ -88,21 +96,29 @@
 	}
 	file := filepath.Base(url)
-	ret := new(HttpDownloader)
+	ret := new(HTTPDownloader)
+	ret.rate = 0
+	bandwidthLimit, err := units.ParseStrictBytes(bwLimit)
+	if err == nil {
+		ret.rate = bandwidthLimit
+		Printf("Download with bandwidth limit set to %s[%d]\n", bwLimit, ret.rate)
+	}
 	ret.url = url
 	ret.file = file
 	ret.par = int64(par)
 	ret.len = len
 	ret.ips = ipstr
-	ret.skipTls = skipTls
+	ret.skipTLS = skipTLS
 	ret.parts = partCalculate(int64(par), len, url)
 	ret.resumable = resumable
+	ret.proxy = proxyServer
 	return ret
 }
 
 func partCalculate(par int64, len int64, url string) []Part {
-	ret := make([]Part, 0)
+	// Pre-allocate, perf tuning
+	ret := make([]Part, par)
 	for j := int64(0); j < par; j++ {
 		from := (len / par) * j
 		var to int64
@@ -119,38 +135,79 @@
 			os.Exit(1)
 		}
 
-		fname := fmt.Sprintf("%s.part%d", file, j)
+		// Zero-pad the part number so the part file names sort correctly as strings
+		fname := fmt.Sprintf("%s.part%06d", file, j)
 		path := filepath.Join(folder, fname) // ~/.hget/download-file-name/part-name
-		ret = append(ret, Part{Url: url, Path: path, RangeFrom: from, RangeTo: to})
+		ret[j] = Part{Index: j, URL: url, Path: path, RangeFrom: from, RangeTo: to}
 	}
+
 	return ret
 }
 
-func (d *HttpDownloader) Do(doneChan chan bool, fileChan chan string, errorChan chan error, interruptChan chan bool, stateSaveChan chan Part) {
+// ProxyAwareHTTPClient will use an HTTP or SOCKS5 proxy if given one.
+func ProxyAwareHTTPClient(proxyServer string) *http.Client {
+	// set up an HTTP client
+	httpTransport := &http.Transport{}
+	httpClient := &http.Client{Transport: httpTransport}
+	var dialer proxy.Dialer
+	dialer = proxy.Direct
+
+	if len(proxyServer) > 0 {
+		if strings.HasPrefix(proxyServer, "http") {
+			proxyURL, err := stdurl.Parse(proxyServer)
+			if err != nil {
+				fmt.Fprintln(os.Stderr, "invalid proxy: ", err)
+			}
+			// create an http proxy dialer
+			dialer, err = proxy.FromURL(proxyURL, proxy.Direct)
+			if err == nil {
+				httpTransport.Dial = dialer.Dial
+			}
+		} else {
+			// create a socks5 dialer
+			dialer, err := proxy.SOCKS5("tcp", proxyServer, nil, proxy.Direct)
+			if err == nil {
+				httpTransport.Dial = dialer.Dial
+			}
+		}
+
+	}
+	return httpClient
+}
+
+// Do downloads every part of the target file concurrently and reports progress and state over the given channels.
+func (d *HTTPDownloader) Do(doneChan chan bool, fileChan chan string, errorChan chan error, interruptChan chan bool, stateSaveChan chan Part) {
 	var ws sync.WaitGroup
 	var bars []*pb.ProgressBar
 	var barpool *pb.Pool
 	var err error
 
-	if DisplayProgressBar() {
-		bars = make([]*pb.ProgressBar, 0)
-		for i, part := range d.parts {
-			newbar := pb.New64(part.RangeTo - part.RangeFrom).SetUnits(pb.U_BYTES).Prefix(color.YellowString(fmt.Sprintf("%s-%d", d.file, i)))
-			bars = append(bars, newbar)
+	for _, p := range d.parts {
+
+		if p.RangeTo <= p.RangeFrom {
+			fileChan <- p.Path
+			stateSaveChan <- Part{
+				Index:     p.Index,
+				URL:       d.url,
+				Path:      p.Path,
+				RangeFrom: p.RangeFrom,
+				RangeTo:   p.RangeTo,
+			}
+
+			continue
+		}
+
+		var bar *pb.ProgressBar
+
+		if DisplayProgressBar() {
+			bar = pb.New64(p.RangeTo - p.RangeFrom).SetUnits(pb.U_BYTES).Prefix(color.YellowString(fmt.Sprintf("%s-%d", d.file, p.Index)))
+			bars = append(bars, bar)
 		}
-		barpool, err = pb.StartPool(bars...)
-		FatalCheck(err)
-	}
-	for i, p := range d.parts {
 		ws.Add(1)
-		go func(d *HttpDownloader, loop int64, part Part) {
+		go func(d *HTTPDownloader, bar *pb.ProgressBar, part Part) {
+			client := ProxyAwareHTTPClient(d.proxy)
 			defer ws.Done()
-			var bar *pb.ProgressBar
-
-			if DisplayProgressBar() {
-				bar = bars[loop]
-			}
 
 			var ranges string
 			if part.RangeTo != d.len {
@@ -197,29 +254,49 @@
 				writer = io.MultiWriter(f)
 			}
 
-			//make copy interruptable by copy 100 bytes each loop
 			current := int64(0)
-			for {
-				select {
-				case <-interruptChan:
-					stateSaveChan <- Part{Url: d.url, Path: part.Path, RangeFrom: current + part.RangeFrom, RangeTo: part.RangeTo}
-					return
-				default:
-					written, err := io.CopyN(writer, resp.Body, 100)
-					current += written
-					if err != nil {
-						if err != io.EOF {
-							errorChan <- err
-						}
-						bar.Finish()
-						fileChan <- part.Path
-						return
-					}
+			finishDownloadChan := make(chan bool)
+
+			go func() {
+				var written int64
+				if d.rate != 0 {
+					reader := shapeio.NewReader(resp.Body)
+					reader.SetRateLimit(float64(d.rate))
+					written, _ = io.Copy(writer, reader)
+				} else {
+					written, _ = io.Copy(writer, resp.Body)
 				}
+				current += written
+				fileChan <- part.Path
+				finishDownloadChan <- true
+			}()
+
+			select {
+			case <-interruptChan:
+				// interrupt the download by forcefully closing the input stream
+				resp.Body.Close()
+				<-finishDownloadChan
+			case <-finishDownloadChan:
+			}
+
+			stateSaveChan <- Part{
+				Index:     part.Index,
+				URL:       d.url,
+				Path:      part.Path,
+				RangeFrom: current + part.RangeFrom,
+				RangeTo:   part.RangeTo,
+			}
+
+			if DisplayProgressBar() {
+				bar.Update()
+				bar.Finish()
 			}
-		}(d, int64(i), p)
+		}(d, bar, p)
 	}
+	barpool, err = pb.StartPool(bars...)
+	FatalCheck(err)
+
 	ws.Wait()
 	doneChan <- true
 	barpool.Stop()
diff --git a/http_test.go b/http_test.go
index 44181eb..c7948ab 100644
--- a/http_test.go
+++ b/http_test.go
@@ -1,9 +1,9 @@
 package main
 
 import (
-	"testing"
 	"os/user"
 	"path/filepath"
+	"testing"
 )
 
 func TestPartCalculate(t *testing.T) {
@@ -13,15 +13,22 @@
 	if len(parts) != 10 {
 		t.Fatalf("parts length should be 10")
 	}
-	if parts[0].Url != "http://foo.bar/file" {
+
+	if parts[0].URL != "http://foo.bar/file" {
 		t.Fatalf("part url was wrong")
 	}
+
 	usr, _ := user.Current()
-	dir := filepath.Join(usr.HomeDir, dataFolder, "file/file.part0")
-	if parts[0].Path != dir {
+	dir := filepath.Join(usr.HomeDir, dataFolder, "file/file.part000001")
+	if parts[1].Path != dir {
 		t.Fatalf("part path was wrong")
 	}
+
 	if parts[0].RangeFrom != 0 && parts[0].RangeTo != 10 {
 		t.Fatalf("part range was wrong")
 	}
+
+	if parts[1].Index != 1 {
+		t.Fatal("part index was wrong")
+	}
 }
diff --git a/joiner.go b/joiner.go
index dee8793..58ed84a 100644
--- a/joiner.go
+++ b/joiner.go
@@ -1,13 +1,14 @@
 package main
 
 import (
-	"gopkg.in/cheggaaa/pb.v1"
 	"github.com/fatih/color"
+	"gopkg.in/cheggaaa/pb.v1"
 	"io"
 	"os"
 	"sort"
 )
 
+// JoinFile joins separate chunks of the file and forms the final downloaded artifact
 func JoinFile(files []string, out string) error {
 	//sort with file name or we will join files with wrong order
 	sort.Strings(files)
diff --git a/main.go b/main.go
index 50f8059..9881d83 100644
--- a/main.go
+++ b/main.go
@@ -1,28 +1,60 @@
 package main
 
 import (
+	"bufio"
 	"flag"
+	"io"
 	"os"
 	"os/signal"
 	"path/filepath"
 	"runtime"
 	"syscall"
+
+	"github.com/imkira/go-task"
 )
 
 var displayProgress = true
 
 func main() {
 	var err error
+	var proxy, filepath, bwLimit string
 
-	conn := flag.Int("n", runtime.NumCPU(), "connection")
+	conn := flag.Int("n", runtime.NumCPU(), "connection")
 	skiptls := flag.Bool("skip-tls", true, "skip verify certificate for https")
+	flag.StringVar(&proxy, "proxy", "", "proxy for downloading, ex \n\t-proxy '127.0.0.1:12345' for socks5 proxy\n\t-proxy 'http://proxy.com:8080' for http proxy")
+	flag.StringVar(&filepath, "file", "", "filepath that contains links in each line")
+	flag.StringVar(&bwLimit, "rate", "", "bandwidth limit to use while downloading, ex\n\t -rate 10kB\n\t-rate 10MiB")
 
 	flag.Parse()
 	args := flag.Args()
 
 	if len(args) < 1 {
-		Errorln("url is required")
-		usage()
-		os.Exit(1)
+		if len(filepath) < 2 {
+			Errorln("url is required")
+			usage()
+			os.Exit(1)
+		}
+		// Create a SerialGroup so the URLs listed in the file are downloaded one after another.
+		g1 := task.NewSerialGroup()
+		file, err := os.Open(filepath)
+		if err != nil {
+			FatalCheck(err)
+		}
+
+		defer file.Close()
+
+		reader := bufio.NewReader(file)
+
+		for {
+			line, _, err := reader.ReadLine()
+
+			if err == io.EOF {
+				break
+			}
+
+			g1.AddChild(downloadTask(string(line), nil, *conn, *skiptls, proxy, bwLimit))
+		}
+		g1.Run(nil)
+		return
 	}
 
 	command := args[0]
@@ -39,15 +71,15 @@
 		}
 		var task string
-		if IsUrl(args[1]) {
-			task = TaskFromUrl(args[1])
+		if IsURL(args[1]) {
+			task = TaskFromURL(args[1])
 		} else {
 			task = args[1]
 		}
 		state, err := Resume(task)
 		FatalCheck(err)
-		Execute(state.Url, state, *conn, *skiptls)
+		Execute(state.URL, state, *conn, *skiptls, proxy, bwLimit)
 		return
 	} else {
 		if ExistDir(FolderOf(command)) {
@@ -55,16 +87,23 @@
 			err := os.RemoveAll(FolderOf(command))
 			FatalCheck(err)
 		}
-		Execute(command, nil, *conn, *skiptls)
+		Execute(command, nil, *conn, *skiptls, proxy, bwLimit)
 	}
 }
 
-func Execute(url string, state *State, conn int, skiptls bool) {
+func downloadTask(url string, state *State, conn int, skiptls bool, proxy string, bwLimit string) task.Task {
+	run := func(t task.Task, ctx task.Context) {
+		Execute(url, state, conn, skiptls, proxy, bwLimit)
+	}
+	return task.NewTaskWithFunc(run)
+}
+
+// Execute configures the HTTPDownloader and uses it to download the given URL.
+func Execute(url string, state *State, conn int, skiptls bool, proxy string, bwLimit string) {
 	//otherwise is hget command
-	var err error
-	signal_chan := make(chan os.Signal, 1)
-	signal.Notify(signal_chan,
+	signalChan := make(chan os.Signal, 1)
+	signal.Notify(signalChan,
 		syscall.SIGHUP,
 		syscall.SIGINT,
 		syscall.SIGTERM,
@@ -82,17 +121,17 @@
 	stateChan := make(chan Part, 1)
 	interruptChan := make(chan bool, conn)
 
-	var downloader *HttpDownloader
+	var downloader *HTTPDownloader
 	if state == nil {
-		downloader = NewHttpDownloader(url, conn, skiptls)
+		downloader = NewHTTPDownloader(url, conn, skiptls, proxy, bwLimit)
 	} else {
-		downloader = &HttpDownloader{url: state.Url, file: filepath.Base(state.Url), par: int64(len(state.Parts)), parts: state.Parts, resumable: true}
+		downloader = &HTTPDownloader{url: state.URL, file: filepath.Base(state.URL), par: int64(len(state.Parts)), parts: state.Parts, resumable: true}
 	}
 	go downloader.Do(doneChan, fileChan, errorChan, interruptChan, stateChan)
 
 	for {
 		select {
-		case <-signal_chan:
+		case <-signalChan:
 			//send par number of interrupt for each routine
 			isInterrupted = true
 			for i := 0; i < conn; i++ {
@@ -109,30 +148,27 @@
 			if isInterrupted {
 				if downloader.resumable {
 					Printf("Interrupted, saving state ... \n")
\n") - s := &State{Url: url, Parts: parts} - err := s.Save() - if err != nil { + s := &State{URL: url, Parts: parts} + if err := s.Save(); err != nil { Errorf("%v\n", err) } - return } else { Warnf("Interrupted, but downloading url is not resumable, silently die") - return } } else { - err = JoinFile(files, filepath.Base(url)) + err := JoinFile(files, filepath.Base(url)) FatalCheck(err) err = os.RemoveAll(FolderOf(url)) FatalCheck(err) - return } + return } } } func usage() { Printf(`Usage: -hget [URL] [-n connection] [-skip-tls true] +hget [-n connection] [-skip-tls true] [-proxy proxy_address] [-file filename] URL hget tasks hget resume [TaskName] `) diff --git a/resume.go b/resume.go index ba0f51d..a5bbd82 100644 --- a/resume.go +++ b/resume.go @@ -8,6 +8,7 @@ import ( "strings" ) +// TaskPrint read and prints data about current download jobs func TaskPrint() error { downloading, err := ioutil.ReadDir(filepath.Join(os.Getenv("HOME"), dataFolder)) if err != nil { @@ -28,6 +29,7 @@ func TaskPrint() error { return nil } +// Resume gets back to a previously stopped task func Resume(task string) (*State, error) { return Read(task) } diff --git a/state.go b/state.go index 529b4d8..e11a60d 100644 --- a/state.go +++ b/state.go @@ -10,22 +10,26 @@ import ( var dataFolder = ".hget/" var stateFileName = "state.json" +// State holds information about url Parts type State struct { - Url string + URL string Parts []Part } +// Part represents a chunk of downloaded file type Part struct { - Url string + Index int64 + URL string Path string RangeFrom int64 RangeTo int64 } +// Save stores downloaded file into disk func (s *State) Save() error { //make temp folder //only working in unix with env HOME - folder := FolderOf(s.Url) + folder := FolderOf(s.URL) Printf("Saving current download data in %s\n", folder) if err := MkdirIfNotExist(folder); err != nil { return err @@ -44,6 +48,7 @@ func (s *State) Save() error { return ioutil.WriteFile(filepath.Join(folder, stateFileName), j, 0644) } +// Read loads data about the state of downloaded files func Read(task string) (*State, error) { file := filepath.Join(os.Getenv("HOME"), dataFolder, task, stateFileName) Printf("Getting data from %s\n", file) diff --git a/ui.go b/ui.go index d3d00a0..1cd066c 100644 --- a/ui.go +++ b/ui.go @@ -16,6 +16,7 @@ var ( Default UI = Console{Stdout: Stdout, Stderr: Stderr} ) +// UI represents a simple IO output. type UI interface { Printf(format string, a ...interface{}) (n int, err error) Println(a ...interface{}) (n int, err error) @@ -23,43 +24,53 @@ type UI interface { Errorln(a ...interface{}) (n int, err error) } +// Printf outputs information level logs func Printf(format string, a ...interface{}) (n int, err error) { return Default.Printf(color.CyanString("INFO: ")+format, a...) } +// Errorf outputs error level logs func Errorf(format string, a ...interface{}) (n int, err error) { return Default.Errorf(color.RedString("ERROR: ")+format, a...) } +// Warnf outputs warning level logs func Warnf(format string, a ...interface{}) (n int, err error) { return Default.Errorf(color.YellowString("WARN: ")+format, a...) } +// Errorln is non formatted error printer. func Errorln(a ...interface{}) (n int, err error) { return Default.Errorln(a...) 
 }
 
+// IsTerminal checks whether the given file is a TTY
 func IsTerminal(f *os.File) bool {
 	return isatty.IsTerminal(f.Fd())
 }
 
+// Console is an implementation of the UI interface
 type Console struct {
 	Stdout io.Writer
 	Stderr io.Writer
 }
 
+// Printf prints formatted information logs to Stdout
 func (c Console) Printf(format string, a ...interface{}) (n int, err error) {
 	return fmt.Fprintf(c.Stdout, format, a...)
 }
 
+// Println prints information logs to Stdout
 func (c Console) Println(a ...interface{}) (n int, err error) {
 	return fmt.Fprintln(c.Stdout, a...)
 }
 
+// Errorf prints formatted error logs to Stderr
 func (c Console) Errorf(format string, a ...interface{}) (n int, err error) {
 	return fmt.Fprintf(c.Stderr, format, a...)
 }
 
+// Errorln prints error logs to Stderr
 func (c Console) Errorln(a ...interface{}) (n int, err error) {
 	return fmt.Fprintln(c.Stderr, a...)
 }
diff --git a/util.go b/util.go
index 72f9b1e..ac9e5de 100644
--- a/util.go
+++ b/util.go
@@ -1,15 +1,16 @@
 package main
 
 import (
+	"errors"
+	"github.com/mattn/go-isatty"
 	"net"
+	"net/url"
 	"os"
-	"github.com/mattn/go-isatty"
 	"path/filepath"
-	"errors"
 	"strings"
-	"net/url"
 )
 
+// FatalCheck panics if err is not nil.
 func FatalCheck(err error) {
 	if err != nil {
 		Errorf("%v", err)
@@ -17,6 +18,7 @@
 	}
 }
 
+// FilterIPV4 returns the IPv4 addresses from ips as strings.
 func FilterIPV4(ips []net.IP) []string {
 	var ret = make([]string, 0)
 	for _, ip := range ips {
@@ -27,6 +29,7 @@
 	return ret
 }
 
+// MkdirIfNotExist creates the `folder` directory if it does not exist
 func MkdirIfNotExist(folder string) error {
 	if _, err := os.Stat(folder); err != nil {
 		if err = os.MkdirAll(folder, 0700); err != nil {
@@ -36,15 +39,18 @@
 	return nil
 }
 
+// ExistDir checks whether `folder` exists
 func ExistDir(folder string) bool {
 	_, err := os.Stat(folder)
 	return err == nil
 }
 
+// DisplayProgressBar reports whether a progress bar should be displayed
 func DisplayProgressBar() bool {
 	return isatty.IsTerminal(os.Stdout.Fd()) && displayProgress
 }
 
+// FolderOf returns the download folder for url while guarding against path traversal (LFI)
 func FolderOf(url string) string {
 	safePath := filepath.Join(os.Getenv("HOME"), dataFolder)
 	fullQualifyPath, err := filepath.Abs(filepath.Join(os.Getenv("HOME"), dataFolder, filepath.Base(url)))
@@ -58,21 +64,23 @@
 	FatalCheck(err)
 
 	if strings.Contains(relative, "..") {
-		FatalCheck(errors.New("you may be a victim of directory traversal path attack\n"))
+		FatalCheck(errors.New("you may be a victim of directory traversal path attack"))
 		return "" //return is redundant be cause in fatal check we have panic, but compiler does not able to check
-	} else {
-		return fullQualifyPath
 	}
+	return fullQualifyPath
+
 }
 
-func TaskFromUrl(url string) string {
+// TaskFromURL derives the task name (the download file name) from a URL
+func TaskFromURL(url string) string {
 	//task is just download file name
 	//so we get download file name on url
 	filename := filepath.Base(url)
 	return filename
 }
 
-func IsUrl(s string) bool {
+// IsURL checks whether `s` is a parsable URL.
+func IsURL(s string) bool {
 	_, err := url.Parse(s)
 	return err == nil
 }
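
For reference, a minimal, self-contained sketch (not part of the patch) of the proxy-dialer pattern that `ProxyAwareHTTPClient` in http.go is built on: an `http.Client` whose transport dials through a SOCKS5 proxy via `golang.org/x/net/proxy`. The proxy address and target URL below are placeholders.

```go
package main

import (
	"fmt"
	"net/http"

	"golang.org/x/net/proxy"
)

// newSOCKS5Client returns an http.Client that routes every connection
// through the given SOCKS5 proxy address (host:port).
func newSOCKS5Client(proxyAddr string) (*http.Client, error) {
	dialer, err := proxy.SOCKS5("tcp", proxyAddr, nil, proxy.Direct)
	if err != nil {
		return nil, err
	}
	transport := &http.Transport{Dial: dialer.Dial}
	return &http.Client{Transport: transport}, nil
}

func main() {
	client, err := newSOCKS5Client("127.0.0.1:12345") // placeholder proxy address
	if err != nil {
		fmt.Println("proxy setup failed:", err)
		return
	}
	resp, err := client.Get("https://example.com") // placeholder URL
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```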