示例#1
0
文件: redis.go 项目: aosen/spiders
// main runs the Redis-scheduler spider example.
//
// Pass --init on the command line to seed the start URL and begin a
// fresh crawl; without it the spider resumes from whatever URLs are
// already queued in Redis by a previous run.
func main() {
	startURL := "http://www.jiexieyin.org"
	threadNum := uint(16)

	// Redis connection settings for the distributed scheduler.
	redisAddr := "127.0.0.1:6379"
	redisMaxConn := 10
	redisMaxIdle := 10

	proc := &MyProcessor{}

	sp := robot.NewSpider(proc, "redis_scheduler_example").
		//SetSleepTime("fixed", 6000, 6000).
		//SetScheduler(scheduler.NewQueueScheduler(true)).
		SetScheduler(robot.NewRedisScheduler(redisAddr, redisMaxConn, redisMaxIdle, true)).
		AddPipeline(robot.NewPipelineConsole()).
		SetThreadnum(threadNum)

	// Detect the --init flag. "fresh" (not "init") avoids shadowing
	// Go's special init identifier.
	fresh := false
	for _, arg := range os.Args {
		if arg == "--init" {
			fresh = true
			break
		}
	}
	if fresh {
		// Fresh crawl: seed the start URL into the Redis queue.
		sp.AddUrl(startURL, "html")
		mlog.LogInst().LogInfo("重新开始爬") // log message: "restart crawl"
	} else {
		// Resumed crawl: rely on URLs already queued in Redis.
		mlog.LogInst().LogInfo("继续爬") // log message: "continue crawl"
	}
	sp.Run()
}
示例#2
0
文件: github.go 项目: aosen/spiders
// main runs the GitHub repositories crawl example.
//
// Spider input:
//  PageProcesser ;
//  Task name used in Pipeline for record;
func main() {
	spider := robot.NewSpider(NewMyPageProcesser(), "TaskName")
	// Start url; "html" is the response type ("html" or "json" or "jsonp" or "text").
	spider.AddUrl("https://github.com/hu17889?tab=repositories", "html")
	spider.AddPipeline(robot.NewPipelineConsole()) // Print result on screen
	spider.SetThreadnum(3)                         // Crawl request by three Coroutines
	spider.Run()
}
示例#3
0
文件: weixin.go 项目: aosen/spiders
// main runs the Sogou Weixin search crawl example. The request is sent
// with headers loaded from weixin.sogou.com.json so the search endpoint
// accepts it.
//
// Spider input:
//  PageProcesser ;
//  Task name used in Pipeline for record;
func main() {
	// Pre-encoded query URL (query=云浮, page 1, utf-8).
	reqURL := "http://weixin.sogou.com/weixin?query=%E4%BA%91%E6%B5%AE&type=1&page=1&ie=utf8"
	robot.NewSpider(NewMyPageProcesser(), "TaskName").
		AddUrlWithHeaderFile(reqURL, "html", "weixin.sogou.com.json"). // Start url; "html" is the response type ("html" or "json" or "jsonp" or "text")
		AddPipeline(robot.NewPipelineConsole()).                       // Print result on screen
		SetThreadnum(3).                                               // Crawl request by three Coroutines
		Run()
}
示例#4
0
文件: sinajson.go 项目: aosen/spiders
func main() {
	// spider input:
	//  PageProcesser ;
	//  task name used in Pipeline for record;
	robot.NewSpider(NewMyPageProcesser(), "sina_stock_news").
		AddUrl("http://live.sina.com.cn/zt/api/l/get/finance/globalnews1/index.htm?format=json&id=63621&pagesize=10&dire=f", "json"). // start url, html is the responce type ("html" or "json" or "jsonp" or "text")
		AddPipeline(robot.NewPipelineConsole()).                                                                                      // Print result to std output
		AddPipeline(robot.NewPipelineFile("./sinafile")).                                                                             // Print result in file
		OpenFileLog("./").                                                                                                            // Error info or other useful info in spider will be logged in file of defalt path like "WD/log/log.2014-9-1".
		SetSleepTime("rand", 1000, 3000).                                                                                             // Sleep time between 1s and 3s.
		Run()
	//AddPipeline(pipeline.NewPipelineFile("/tmp/tmpfile")). // print result in file
}