Example #1
// Test_Scrape verifies that a page with no outgoing links is crawled
// into a Page whose Links slice stays empty.
func Test_Scrape(t *testing.T) {
	ts := setupSinglePage()
	defer ts.Close()

	// The second return value carries crawl statistics and is not needed here.
	page, _ := crawler.Crawl(ts.URL)
	if len(page.Links) > 0 {
		t.Errorf("Page should not have links.")
	}
}
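The setup helpers are not shown on this page. A minimal sketch of what setupSinglePage might look like, assuming the crawler follows ordinary <a href> links and using only the standard net/http/httptest package:

// Sketch of the setupSinglePage helper assumed by Test_Scrape: a test
// server whose only page contains no links at all.
import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func setupSinglePage() *httptest.Server {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, "<html><body>no links here</body></html>")
	})
	return httptest.NewServer(mux)
}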
Example #2
// Benchmark_Scrape5 measures crawling a site with five subpages. The
// timer is stopped while the test server is set up so that only the
// crawl itself is measured.
func Benchmark_Scrape5(b *testing.B) {
	b.StopTimer()
	ts := setupFiveSubpages()
	defer ts.Close()
	b.StartTimer()

	for i := 0; i < b.N; i++ {
		crawler.Crawl(ts.URL)
	}
}
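Likewise, setupFiveSubpages is only inferred from its call sites here. A sketch with the same imports as the helper above, serving a root page that links to five otherwise empty subpages:

func setupFiveSubpages() *httptest.Server {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/" {
			return // subpages are empty, so they contribute no further links
		}
		// The root page links to five subpages.
		for i := 1; i <= 5; i++ {
			fmt.Fprintf(w, `<a href="/sub%d">subpage %d</a>`, i, i)
		}
	})
	return httptest.NewServer(mux)
}

The benchmark itself can then be run with go test -bench Scrape5.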
Example #3
// Test_ScrapeCircular verifies that the crawler terminates on circular
// links: the root links to one subpage, which links back to the root.
func Test_ScrapeCircular(t *testing.T) {
	ts := setupCircularSubpages()
	defer ts.Close()

	page, _ := crawler.Crawl(ts.URL)
	if len(page.Links) != 1 {
		t.Errorf("Page should have 1 subpage.")
	}
	for _, p := range page.Links {
		// Each subpage holds exactly one link: the one pointing back.
		if len(p.Links) != 1 {
			t.Errorf("Subpage should have 1 link.")
		}
	}
}
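For this test to be meaningful, setupCircularSubpages has to produce a cycle. A sketch under the same assumptions, with the root and a single subpage linking to each other:

func setupCircularSubpages() *httptest.Server {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, `<a href="/sub">subpage</a>`)
	})
	mux.HandleFunc("/sub", func(w http.ResponseWriter, r *http.Request) {
		// Links back to the root, closing the cycle the crawler must detect.
		fmt.Fprint(w, `<a href="/">home</a>`)
	})
	return httptest.NewServer(mux)
}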
Example #4
// Test_Scrape5 verifies that all five subpages are discovered and that
// none of them has outgoing links of its own.
func Test_Scrape5(t *testing.T) {
	ts := setupFiveSubpages()
	defer ts.Close()

	page, _ := crawler.Crawl(ts.URL)
	if len(page.Links) != 5 {
		t.Errorf("Page should have 5 subpages.")
	}
	for _, p := range page.Links {
		if len(p.Links) > 0 {
			t.Errorf("Subpage should have no links.")
		}
	}
}
Example #5
// crawl runs the crawler against url, reports how many pages were
// visited and how long the crawl took, and optionally persists the result.
func crawl(url string, saveResults bool) {
	start := time.Now()
	page, job := crawler.Crawl(url)
	duration := time.Since(start)

	fmt.Printf("Starting from %s, crawled %d pages in %s\n", page.URL, job.PagesCrawled, duration)

	if saveResults {
		save(page)
		// Optional Graphviz export, kept for reference:
		//a := graph(page)
		//filename := "/tmp/" + page.Path + ".dot"
		//ioutil.WriteFile(filename, []byte(a.String()), 0755)
		//fmt.Printf("Your .dot file is at: %s\n", filename)
	}
}
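A sketch of how this helper might be wired into a command-line entry point; the -save flag and the argument handling are assumptions for illustration, not part of the original code:

// Hypothetical main for the crawler CLI; requires the flag, fmt, and os
// packages in addition to the imports used by crawl above.
func main() {
	saveResults := flag.Bool("save", false, "persist the crawl results")
	flag.Parse()
	if flag.NArg() != 1 {
		fmt.Fprintln(os.Stderr, "usage: crawler [-save] <url>")
		os.Exit(1)
	}
	crawl(flag.Arg(0), *saveResults)
}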