// Spider is the scheduling module that drives all the other modules, such as the downloader, the pipelines and the scheduler.
// The taskname may be an empty string; it can also be used in a Pipeline to record which task crawled each result.
func NewSpider(pageinst page_processer.PageProcesser, taskname string) *Spider {
    mlog.StraceInst().Open()

    ap := &Spider{taskname: taskname, pPageProcesser: pageinst}

    // init filelog.
    ap.CloseFileLog()
    ap.exitWhenComplete = true
    ap.sleeptype = "fixed"
    ap.startSleeptime = 0

    // init spider
    if ap.pScheduler == nil {
        ap.SetScheduler(scheduler.NewQueueScheduler(false))
    }

    if ap.pDownloader == nil {
        ap.SetDownloader(downloader.NewHttpDownloader())
    }

    mlog.StraceInst().Println("** start spider **")
    ap.pPiplelines = make([]pipeline.Pipeline, 0)

    return ap
}
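// Usage sketch (an illustration, not part of the package): how a caller might wire
// NewSpider together with a PageProcesser and start a crawl. MyNewsProcesser is
// hypothetical, and the chained AddUrl/AddPipeline/SetThreadnum/Run calls and the
// page helpers (IsSucc, Errormsg, AddField) are assumed from the fluent API this
// package exposes elsewhere; adjust the names to the actual code before using it.
type MyNewsProcesser struct{}

func (this *MyNewsProcesser) Process(p *page.Page) {
    if !p.IsSucc() {
        fmt.Println(p.Errormsg())
        return
    }
    // Pull one field out of the parsed document and hand it to the pipelines.
    title := p.GetHtmlParser().Find("title").Text()
    p.AddField("title", title)
}

func (this *MyNewsProcesser) Finish() {
    // Nothing to release in this sketch.
}

func ExampleNewSpider() {
    NewSpider(&MyNewsProcesser{}, "sina_news").
        AddUrl("http://live.sina.com.cn/zt/l/v/finance/globalnews1/", "html").
        AddPipeline(pipeline.NewPipelineConsole()).
        SetThreadnum(3).
        Run()
}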
func TestDownloadJson(t *testing.T) {
    req := request.NewRequest("http://live.sina.com.cn/zt/api/l/get/finance/globalnews1/index.htm?format=json&id=23521&pagesize=4&dire=f&dpc=1", "json")

    var dl downloader.Downloader = downloader.NewHttpDownloader()

    p := dl.Download(req)

    jsonMap := p.GetJsonMap()
    fmt.Printf("%v", jsonMap)
    //body := p.GetBodyStr()
    //fmt.Println(body)
}
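// Sketch: one way to walk whatever GetJsonMap returns. The concrete return type
// depends on the page implementation, so this assumes it is a value decoded by
// encoding/json (map[string]interface{}, []interface{}, string, float64, bool or
// nil) and prints the top-level entries when it is an object or an array.
func printJsonTopLevel(jsonMap interface{}) {
    switch v := jsonMap.(type) {
    case map[string]interface{}:
        for key, val := range v {
            fmt.Printf("%s: %v\n", key, val)
        }
    case []interface{}:
        for i, val := range v {
            fmt.Printf("[%d]: %v\n", i, val)
        }
    default:
        // Scalar or unknown wrapper type; fall back to the generic formatter.
        fmt.Printf("%v\n", v)
    }
}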
func TestDownloadHtml(t *testing.T) {
    //request := request.NewRequest("http://live.sina.com.cn/zt/api/l/get/finance/globalnews1/index.htm?format=json&callback=t13975294&id=23521&pagesize=45&dire=f&dpc=1")
    req := request.NewRequest("http://live.sina.com.cn/zt/l/v/finance/globalnews1/", "html", "", "GET", "", nil, nil, nil, nil)

    var dl downloader.Downloader = downloader.NewHttpDownloader()

    p := dl.Download(req)

    doc := p.GetHtmlParser()
    //fmt.Println(doc)
    //body := p.GetBodyStr()
    //fmt.Println(body)

    s := doc.Find("body")
    if s.Length() < 1 {
        t.Error("html parse failed!")
    }

    /*
        doc, err := goquery.NewDocument("http://live.sina.com.cn/zt/l/v/finance/globalnews1/")
        if err != nil {
            fmt.Printf("%v", err)
        }
        s := doc.Find("meta")
        fmt.Println(s.Length())

        resp, err := http.Get("http://live.sina.com.cn/zt/l/v/finance/globalnews1/")
        if err != nil {
            fmt.Printf("%v", err)
        }
        defer resp.Body.Close()

        doc, err = goquery.NewDocumentFromReader(resp.Body)
        s = doc.Find("meta")
        fmt.Println(s.Length())
    */
}
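// Sketch: once GetHtmlParser has handed back a *goquery.Document, extraction is
// plain goquery. The "a" selector below is only illustrative; nothing about the
// live page's markup is assumed here.
func printLinks(doc *goquery.Document) {
    doc.Find("a").Each(func(i int, s *goquery.Selection) {
        href, ok := s.Attr("href")
        if !ok {
            return
        }
        fmt.Printf("%d: %q -> %s\n", i, s.Text(), href)
    })
}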
func TestCharSetChange(t *testing.T) {
    //req := request.NewRequest("http://stock.finance.sina.com.cn/usstock/api/jsonp.php/t/US_CategoryService.getList?page=1&num=60", "jsonp")
    req := request.NewRequest("http://soft.chinabyte.com/416/13164916.shtml", "html", "", "GET", "", nil, nil, nil, nil)

    var dl downloader.Downloader = downloader.NewHttpDownloader()

    p := dl.Download(req)
    //hp := p.GetHtmlParser()

    body := p.GetBodyStr()
    fmt.Println(body)
}
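// Sketch: the charset handling exercised by TestCharSetChange, done by hand with
// golang.org/x/net/html/charset (an assumption -- the downloader may use a
// different conversion library internally; the net/http, io/ioutil and charset
// imports are assumed). It fetches a page and decodes the body to UTF-8 based on
// the Content-Type header and any <meta charset> declaration in the document.
func fetchUtf8(url string) (string, error) {
    resp, err := http.Get(url)
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    // charset.NewReader sniffs the declared encoding (e.g. gb2312) and wraps the
    // body in a reader that yields UTF-8.
    utf8Reader, err := charset.NewReader(resp.Body, resp.Header.Get("Content-Type"))
    if err != nil {
        return "", err
    }

    body, err := ioutil.ReadAll(utf8Reader)
    if err != nil {
        return "", err
    }
    return string(body), nil
}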
// close resets the Spider's components to their defaults (fresh scheduler,
// downloader and empty pipeline list) so the instance can be reconfigured cleanly.
func (this *Spider) close() {
    this.SetScheduler(scheduler.NewQueueScheduler(false))
    this.SetDownloader(downloader.NewHttpDownloader())
    this.pPiplelines = make([]pipeline.Pipeline, 0)
    this.exitWhenComplete = true
}
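// Sketch: because close() simply re-installs the default scheduler, downloader and
// pipeline slice, a Spider can be swapped onto different components between runs.
// The boolean passed to NewQueueScheduler is assumed here to enable duplicate-URL
// filtering (it is false in the defaults above); verify its meaning against the
// scheduler package before relying on it.
func reconfigure(sp *Spider) {
    sp.SetScheduler(scheduler.NewQueueScheduler(true))
    sp.SetDownloader(downloader.NewHttpDownloader())
}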