Example #1
func log_tailing(conn *scrapinghub.Connection, job_id string) {
	var jobs scrapinghub.Jobs
	jobinfo, err := jobs.JobInfo(conn, job_id)
	if err != nil {
		log.Fatalf("%s\n", err)
	}
	// Start at the current number of log lines in the job, backing up one
	// so the first fetch re-prints the last existing line.
	offset := jobinfo.Logs
	if offset > 0 {
		offset--
	}
	count := 10 // number of lines to request on each call
	ls := scrapinghub.LinesStream{Conn: conn, Count: count, Offset: offset}
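	// Poll once per second: print any new lines, then advance the offset by
	// the number of lines just retrieved so the next call starts after them.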
	for {
		retrieved := 0
		ch_lines, ch_err := ls.LogLines(job_id)
		for line := range ch_lines {
			retrieved++
			fmt.Fprintf(os.Stdout, "%s\n", line)
		}
		for err := range ch_err {
			log.Fatalf("%s\n", err)
		}
		ls.Offset += retrieved
		time.Sleep(time.Second)
	}
}
Example #2
func cmd_log(conn *scrapinghub.Connection, args []string, flags *PFlags) {
	if len(args) < 1 {
		log.Fatalf("Missing argument: <job_id>\n")
	}

	job_id := args[0]
	count := flags.Count
	offset := flags.Offset

	if flags.Tailing {
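		// Follow mode: keep polling for new log lines until interrupted.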
		log_tailing(conn, job_id)
	} else {
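		// One-shot mode: fetch a single window of Count lines starting at Offset.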
		ls := scrapinghub.LinesStream{Conn: conn, Count: count, Offset: offset}
		ch_lines, ch_err := ls.LogLines(job_id)

		for line := range ch_lines {
			print_out(flags, line)
		}
		for err := range ch_err {
			log.Fatalf("log error: %s\n", err)
		}
	}
}
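A minimal sketch of how the two functions above could be wired together, assuming the caller already holds a working *scrapinghub.Connection. The run_log_command helper and its literal flag values are hypothetical; only the Count, Offset and Tailing fields of PFlags are taken from the examples above.

func run_log_command(conn *scrapinghub.Connection, job_id string, follow bool) {
	// Hypothetical wrapper: Count mirrors the 10-line window used by
	// log_tailing; Tailing switches cmd_log between one-shot and follow mode.
	flags := &PFlags{Count: 10, Offset: 0, Tailing: follow}
	cmd_log(conn, []string{job_id}, flags)
}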