crawler.go

package main

import (
	"log"
	"net/url"
	"os"
	"time"

	"github.com/pkg/errors"
)
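
// CrawlOptions bundles the parameters of a crawl run: the number of
// concurrent requests, a timeout, the User-Agent header sent with each
// request, and an optional log file path for crawl results.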
type CrawlOptions struct {
	NumberOfConcurrentRequests int
	Timeout                    time.Duration
	UserAgent                  string
	LogFile                    string
}
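
// crawl fetches the given XML sitemap, feeds the URLs it contains into a
// queue, and works through that queue with options.NumberOfConcurrentRequests
// concurrent workers. It returns once all queued URLs have been processed or
// a value is received on the stop channel.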
func crawl(xmlSitemapURL url.URL, options CrawlOptions, stop chan bool) error {

	// read the XML sitemap as an initial source of URLs
	urlsFromXMLSitemap, err := getURLs(xmlSitemapURL, "gargantua bot")
	if err != nil {
		return err
	}

	// the URL queue
	urls := make(chan crawlerUrl, len(urlsFromXMLSitemap))

	// fill the URL queue with the URLs from the XML sitemap
	for _, xmlSitemapURLEntry := range urlsFromXMLSitemap {
		urls <- xmlSitemapURLEntry
	}

	results := make(chan WorkResult)

	// the worker pool: a buffered channel holding the IDs of all currently
	// available workers; receiving an ID reserves a worker, sending it back
	// releases the worker again
	workers := make(chan int, options.NumberOfConcurrentRequests)
	for workerID := 1; workerID <= options.NumberOfConcurrentRequests; workerID++ {
		workers <- workerID
	}
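
	// the dispatcher: de-duplicates incoming URLs, hands each new URL to an
	// available worker, and signals completion once all workers are idle and
	// the queue is empty (or a stop signal is received)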
	allURLsHaveBeenVisited := make(chan bool)
	go func() {
		var visitedURLs = make(map[string]crawlerUrl)

		for {
			select {
			case <-stop:
				allURLsHaveBeenVisited <- true
				return

			case targetURL := <-urls:
				// skip URLs we have already seen
				_, alreadyVisited := visitedURLs[targetURL.getUrl()]
				if alreadyVisited {
					continue
				}

				// mark the URL as visited
				visitedURLs[targetURL.getUrl()] = targetURL

				debugf("Sending URL to work queue: %s", targetURL.String())

				go func() {
					workerID := <-workers
					debugf("Using worker %d for URL %q", workerID, targetURL.String())
					results <- executeWork(workerID, cap(workers), targetURL, options.UserAgent, urls)
					debugf("Worker %d finished processing URL %q", workerID, targetURL.String())
					workers <- workerID
				}()

			case <-time.After(time.Second * 1):
				// if all workers are idle and no URLs are queued,
				// the crawl is finished
				if len(workers) == cap(workers) && len(urls) == 0 {
					allURLsHaveBeenVisited <- true
					return
				}
			}
		}
	}()
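
	// optionally write every crawl result to a log file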
	var logger *log.Logger
	if options.LogFile != "" {
		file, err := os.OpenFile(options.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
		if err != nil {
			return errors.Wrapf(err, "failed to open log file %q for writing", options.LogFile)
		}
		defer file.Close()

		logger = log.New(file, "", log.Ldate|log.Ltime)
	}

	// update the statistics with the results
	allStatisticsHaveBeenUpdated := make(chan bool)
	go func() {
		for {
			select {
			case <-allURLsHaveBeenVisited:
				allStatisticsHaveBeenUpdated <- true
				return

			case result := <-results:
				receivedUrl := result.URL()
				debugf("Received results for URL %q", receivedUrl.String())

				updateStatistics(result)

				if logger != nil {
					logResult(logger, result)
				}
			}
		}
	}()

	<-allStatisticsHaveBeenUpdated

	return nil
}
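
// Usage sketch (hypothetical caller, not part of the original file; the
// actual entry point lives elsewhere in this package):
//
//	sitemapURL, err := url.Parse("https://example.com/sitemap.xml")
//	if err != nil {
//		log.Fatal(err)
//	}
//
//	stop := make(chan bool)
//	options := CrawlOptions{
//		NumberOfConcurrentRequests: 5,
//		Timeout:                    60 * time.Second,
//		UserAgent:                  "gargantua bot",
//	}
//
//	if err := crawl(*sitemapURL, options, stop); err != nil {
//		log.Fatal(err)
//	}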