/
crawler.go
36 lines (32 loc) · 860 Bytes
/
crawler.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
package main
import (
"flag"
"fmt"
"log"
"net/url"
"github.com/ksheremet/crawler/crawler"
)
// main parses command-line flags, crawls the given start URL with the
// configured depth/search/parallel options, and prints every link found.
func main() {
	// Lshortfile prefixes each log line with the source file name and
	// line number (per the log package docs).
	log.SetFlags(log.Lshortfile)

	var adr = flag.String("url", "http://xmpp.org", "http address")
	var depth = flag.Int("depth", 5, "depth of searching")
	var search = flag.Bool("search", false, "search in all hostname")
	var parallel = flag.Bool("parallel", false, "perform in parallel")
	flag.Parse()

	log.Println("get properties from command line",
		*adr, *depth, *search, *parallel)

	u, err := url.Parse(*adr)
	if err != nil {
		// Nothing useful can be done without a valid URL; log and exit
		// normally (matches the original behavior — no non-zero exit code).
		log.Println("Bad link", err)
		return
	}

	// Create the crawler, walk the site, then print every collected link.
	c := crawler.NewCrawler(*depth, *search, *parallel)
	c.Crawl(u)
	log.Println("print links")
	for val := range c.GetResult().Iter() {
		fmt.Println(val)
	}
}