Example #1
func log_tailing(conn *scrapinghub.Connection, job_id string) {
	var jobs scrapinghub.Jobs
	jobinfo, err := jobs.JobInfo(conn, job_id)
	if err != nil {
		log.Fatalf("%s\n", err)
	}
	// Number of log lines in the job
	offset := jobinfo.Logs
	if offset > 0 {
		offset -= 1 // start one line before the end so the last line prints immediately
	}
	count := 10 // ask for this many lines on every call
	ls := scrapinghub.LinesStream{Conn: conn, Count: count, Offset: offset}
	for { // poll forever for new log lines
		retrieved := 0
		ch_lines, ch_err := ls.LogLines(job_id)
		for line := range ch_lines {
			retrieved++
			fmt.Fprintf(os.Stdout, "%s\n", line)
		}
		for err := range ch_err {
			log.Fatalf("%s\n", err)
		}
		ls.Offset += retrieved // skip the lines already printed on the next request
		time.Sleep(time.Second)
	}
}
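The tail loop above polls once per second, prints whatever new log lines have appeared, and advances ls.Offset by the number of lines actually retrieved so nothing is printed twice. Below is a minimal, self-contained sketch of the same follow pattern; fetchLines is a hypothetical stand-in for the paginated log call, and the loop is bounded here so the sketch terminates.

package main

import (
	"fmt"
	"time"
)

// fetchLines is hypothetical: it stands in for any call that returns up to
// count lines starting at offset. It yields a single page so the example ends.
func fetchLines(offset, count int) []string {
	if offset > 0 {
		return nil
	}
	lines := make([]string, 0, count)
	for i := 0; i < count; i++ {
		lines = append(lines, fmt.Sprintf("log line %d", offset+i))
	}
	return lines
}

func main() {
	offset, count := 0, 10
	for polls := 0; polls < 3; polls++ { // the real tail loop never exits
		retrieved := fetchLines(offset, count)
		for _, line := range retrieved {
			fmt.Println(line)
		}
		offset += len(retrieved) // advance past the lines already printed
		time.Sleep(time.Second)
	}
}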
Example #2
func cmd_jobs(conn *scrapinghub.Connection, args []string, flags *PFlags) {
	if len(args) < 1 {
		log.Fatalf("Missing argument: <project_id>\n")
	}
	project_id := args[0]
	filters := equality_list_to_map(args[1:]) // remaining "key=value" args become job filters

	count := flags.Count
	offset := flags.Offset

	if flags.AsJsonLines {
		ls := scrapinghub.LinesStream{Conn: conn, Count: count, Offset: offset}
		ch_jobs, errch := ls.JobsAsJsonLines(project_id, filters)
		for line := range ch_jobs {
			fmt.Println(line)
		}
		for err := range errch {
			log.Fatalf("jobs error: %s\n", err)
		}
	} else {
		var jobs scrapinghub.Jobs
		jobs_list, err := jobs.List(conn, project_id, count, filters)
		if err != nil {
			log.Fatalf("jobs error: %s", err)
		}
		outfmt := "| %10s | %25s | %12s | %10s | %10s | %10s | %20s |\n"
		print_out(flags, outfmt, "id", "spider", "state", "items", "errors", "log lines", "started_time")
		print_out(flags, dashes(106))
		for _, j := range jobs_list.Jobs {
			print_out(flags, "| %10s | %25s | %12s | %10d | %10d | %10d | %20s |\n", j.Id, j.Spider, j.State,
				j.ItemsScraped, j.ErrorsCount, j.Logs, j.StartedTime)
		}
	}
}
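Each of these streaming commands consumes the same two-channel shape: a data channel that is ranged over until the producer closes it, followed by an error channel that is checked once the data is drained. The sketch below illustrates that consumption pattern with a hypothetical producer; streamLines and its behavior are assumptions for illustration, not the scrapinghub package's internals.

package main

import (
	"errors"
	"fmt"
	"log"
)

// streamLines is a hypothetical producer shaped the way the consumers above
// expect: it sends lines on one channel, at most one error on the other,
// and closes both so the consumer's range loops terminate.
func streamLines(fail bool) (<-chan string, <-chan error) {
	lines := make(chan string)
	errs := make(chan error, 1)
	go func() {
		defer close(lines)
		defer close(errs)
		for i := 0; i < 3; i++ {
			lines <- fmt.Sprintf("line %d", i)
		}
		if fail {
			errs <- errors.New("stream interrupted")
		}
	}()
	return lines, errs
}

func main() {
	chLines, chErr := streamLines(false)
	for line := range chLines { // drain all data first
		fmt.Println(line)
	}
	for err := range chErr { // then report any trailing error
		log.Fatalf("stream error: %s\n", err)
	}
}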
Example #3
func cmd_items(conn *scrapinghub.Connection, args []string, flags *PFlags) {
	if len(args) < 1 {
		log.Fatalf("Missing argument: <job_id>\n")
	}

	job_id := args[0]
	count := flags.Count
	offset := flags.Offset
	ls := scrapinghub.LinesStream{Conn: conn, Count: count, Offset: offset}

	if flags.AsJsonLines {
		ch_lines, errch := ls.ItemsAsJsonLines(job_id)

		for line := range ch_lines {
			print_out(flags, line)
		}
		for err := range errch {
			log.Fatalf("items error: %s\n", err)
		}
	} else if flags.AsCSV {
		ch_lines, errch := ls.ItemsAsCSV(job_id, flags.CSVFlags.IncludeHeaders, flags.CSVFlags.Fields)
		for line := range ch_lines {
			print_out(flags, line)
		}
		for err := range errch {
			log.Fatalf("items error: %s\n", err)
		}
	} else {
		items, err := scrapinghub.RetrieveItems(conn, job_id, count, offset)
		if err != nil {
			log.Fatalf("items error: %s\n", err)
		}
		for i, e := range items {
			print_out(flags, "Item %5d %s\n", i, dashes(129))
			for k, v := range e {
				print_out(flags, "| %-33s | %100s |\n", k, fmt.Sprintf("%v", v))
			}
			print_out(flags, dashes(140))
		}
	}
}
Example #4
func cmd_log(conn *scrapinghub.Connection, args []string, flags *PFlags) {
	if len(args) < 1 {
		log.Fatalf("Missing argument: <job_id>\n")
	}

	job_id := args[0]
	count := flags.Count
	offset := flags.Offset

	if flags.Tailing { // follow the log continuously, like "tail -f"
		log_tailing(conn, job_id)
	} else {
		ls := scrapinghub.LinesStream{Conn: conn, Count: count, Offset: offset}
		ch_lines, ch_err := ls.LogLines(job_id)

		for line := range ch_lines {
			print_out(flags, line)
		}
		for err := range ch_err {
			log.Fatalf("log error: %s\n", err)
		}
	}
}