Skip to content

Commit

Permalink
Merge pull request #4 from canhlinh/error-handler
Browse files Browse the repository at this point in the history
Add error handler on every part
  • Loading branch information
canhlinh authored Jul 22, 2018
2 parents 5c531fb + 54b6eb4 commit db8ca9c
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 15 deletions.
31 changes: 26 additions & 5 deletions file.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@ type FileDler interface {
StopDownload()
}

// ErrorListener is the channel on which download parts report fatal
// errors (after exhausting their retries) back to the owning File.
type ErrorListener chan error

type File struct {
Name string
Size int64
Expand All @@ -41,6 +44,7 @@ type File struct {
header map[string]string
maxPart int64
mutex *sync.Mutex
errorListener ErrorListener
}

func NewFile(remoteURL string, cookies []*http.Cookie, header map[string]string) (*File, error) {
Expand Down Expand Up @@ -102,8 +106,8 @@ func (file *File) SetDir(dir string) {
}

func (file *File) StartDownload() error {
file.errorListener = make(ErrorListener, file.maxPart)
if file.maxPart > 1 {

rangeBytes := file.Size / file.maxPart

for part := int64(0); part < file.maxPart; part++ {
Expand All @@ -127,8 +131,10 @@ func (file *File) StartDownload() error {
}

file.monitor()
file.Wait()
close(file.ProgressHandler)
defer close(file.ProgressHandler)
if err := file.Wait(); err != nil {
return err
}
return file.join()
}

Expand Down Expand Up @@ -157,8 +163,23 @@ func (file *File) monitor() {
}()
}

func (file *File) Wait() {
file.wait.Wait()
// Wait blocks until every download part has finished, or returns the
// first error reported by any part via the file's error listener.
func (file *File) Wait() error {
	done := wait(file.wait)
	select {
	case err := <-file.errorListener:
		return err
	case <-done:
		return nil
	}
}

func wait(wg *sync.WaitGroup) chan struct{} {
c := make(chan struct{})
go func() {
wg.Wait()
c <- struct{}{}
}()
return c
}

func (file *File) join() error {
Expand Down
35 changes: 25 additions & 10 deletions file_part.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,15 @@ package idg
import (
"fmt"
"io"
"log"
"net/http"
"os"
"time"
)

const (
	// ReTriesDownload is the maximum number of attempts made to download
	// a single part before the error is sent to the file's error listener.
	ReTriesDownload = 10
)

// FilePartResult is a placeholder for per-part download results.
// NOTE(review): currently empty and unused in the visible code —
// confirm intended purpose before extending or removing.
type FilePartResult struct {
}

Expand Down Expand Up @@ -46,18 +49,14 @@ func (part *FilePart) startDownload() error {

TRY_DOWNLOAD:
part.attempt++
if part.attempt > 3 {
return
} else if part.attempt > 1 {
time.Sleep(time.Second)
}

req, _ := http.NewRequest(http.MethodGet, part.File.RemoteURL, nil)
if part.File.header != nil {
for key, value := range part.File.header {
req.Header.Add(key, value)
}
}

if part.File.maxPart > 1 {
req.Header.Add("Range", fmt.Sprintf("bytes=%d-%d", part.StartByte, part.EndByte))
}
Expand All @@ -68,8 +67,12 @@ func (part *FilePart) startDownload() error {
part.File.mutex.Lock()
res, err := http.DefaultTransport.RoundTrip(req)
if err != nil {
log.Println(err)
part.File.mutex.Unlock()
if part.attempt > ReTriesDownload {
part.File.errorListener <- err
return
}
time.Sleep(3 * time.Second)
goto TRY_DOWNLOAD
}
part.File.mutex.Unlock()
Expand All @@ -80,13 +83,25 @@ func (part *FilePart) startDownload() error {

fileWriter, err := os.Create(part.path)
if err != nil {
log.Println(err)
if part.attempt > ReTriesDownload {
part.File.errorListener <- err
return
}
time.Sleep(3 * time.Second)
goto TRY_DOWNLOAD
}
defer res.Body.Close()
defer fileWriter.Close()

part.FileWriter = fileWriter
part.copyBuffer(fileWriter, res.Body)
fileWriter.Close()
if err := part.copyBuffer(fileWriter, res.Body); err != nil {
if part.attempt > ReTriesDownload {
part.File.errorListener <- err
return
}
time.Sleep(3 * time.Second)
goto TRY_DOWNLOAD
}
}()

return nil
Expand Down

0 comments on commit db8ca9c

Please sign in to comment.