/
crawl.go
80 lines (72 loc) · 1.91 KB
/
crawl.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
package main
import (
"fmt"
"io/ioutil"
"math/rand"
"net/http"
"strings"
"time"
"github.com/PuerkitoBio/gocrawl"
"github.com/PuerkitoBio/goquery"
)
// CrawlerExtender customizes gocrawl via the embedded DefaultExtender:
// it overrides Visit to extract one CSS-selected section from each page
// visited and save its text to disk.
type CrawlerExtender struct {
	gocrawl.DefaultExtender
	// Section is the CSS selector of the page region to extract.
	Section string
	// outDir is the directory extracted text files are written into.
	outDir string
	// isSectionLinks, when true, makes Visit return only the links found
	// inside Section (so the crawl follows section-local links only).
	isSectionLinks bool
	// skips lists substrings that are removed from the extracted text.
	skips []string
	// files receives the path of each text file successfully written.
	files chan string
}
// Visit extracts the text of the configured CSS section from the crawled
// page, writes it to a randomly named .txt file under outDir, and reports
// the written path on the files channel.
//
// Returns (links, false) when isSectionLinks is set, restricting the crawl
// to links found inside the section; otherwise returns (nil, true) so
// gocrawl harvests links from the whole document itself.
func (e *CrawlerExtender) Visit(ctx *gocrawl.URLContext, res *http.Response, doc *goquery.Document) (interface{}, bool) {
	// Run the selector once and reuse the result (original queried twice).
	section := doc.Find(e.Section)
	if section.Length() == 0 {
		fmt.Println("Nothing in this section")
		return nil, true
	}
	// Random file name; collisions are unlikely but not impossible
	// (rand is unseeded and uniqueness is not checked).
	title := fmt.Sprintf("%v", rand.Int63())
	body := section.Text()
	for _, skip := range e.skips {
		body = strings.Replace(body, skip, "", -1)
	}
	path := e.outDir + "/" + title + ".txt"
	if err := ioutil.WriteFile(path, []byte(body), 0644); err != nil {
		// The original silently dropped write errors; surface them so
		// missing output files are diagnosable.
		fmt.Println("could not write", path, ":", err)
	} else {
		e.files <- path
	}
	if !e.isSectionLinks {
		return nil, true
	}
	aTags := section.Find("a")
	// BUG FIX: the original used make([]string, 10), which left ten empty
	// strings at the front of the returned link list — the crawler then
	// tried to follow ten empty URLs. Start at length 0 with the exact
	// capacity instead.
	links := make([]string, 0, aTags.Length())
	for i := range aTags.Nodes {
		link, _ := aTags.Eq(i).Attr("href")
		links = append(links, link)
	}
	return links, false
}
// crawlSite starts crawling one configured site in a background goroutine.
// It returns a buffered channel on which the paths of written text files
// arrive; the channel is closed once the crawl finishes.
func crawlSite(siteConfig SiteConfig) <-chan string {
	out := make(chan string, 10)
	ext := &CrawlerExtender{
		Section:        siteConfig.Section,
		outDir:         outDir,
		isSectionLinks: siteConfig.IsSectionLinks,
		skips:          siteConfig.Skip,
		files:          out,
	}
	opts := gocrawl.NewOptions(ext)
	opts.CrawlDelay = time.Second
	opts.MaxVisits = siteConfig.Depth
	c := gocrawl.NewCrawlerWithOptions(opts)
	go func() {
		defer close(out)
		c.Run(siteConfig.Url)
	}()
	return out
}
// crawlAlllSites kicks off a crawl for every site in the configuration and
// returns one result channel per site, in configuration order.
func crawlAlllSites(config *Config) []<-chan string {
	fmt.Printf("No of sites to crawl : %d\n", len(config.Sites))
	channels := make([]<-chan string, 0, len(config.Sites))
	for _, sc := range config.Sites {
		channels = append(channels, crawlSite(sc))
	}
	return channels
}