Example #1
0
// main submits a BigQuery extract job that exports the table
// <dest dataset>.temp_grouped_v2 to gs://ct_temp/151028.csv, then prints the
// job ID on success or the error on failure.
func main() {
	flag.Parse()
	// setEnvVars()

	client, err := GoogleClient()
	if err != nil {
		// Previously this error was silently swallowed; surface it instead.
		fmt.Print(err)
		return
	}

	bq, err := bigquery.New(client)
	if err != nil {
		// Previously discarded with `_`; a nil service would panic below.
		fmt.Print(err)
		return
	}

	// Dataset reference, kept only for the commented-out query experiments
	// below; commented out so it is no longer dead code on the live path.
	// dsr := new(bigquery.DatasetReference)
	// dsr.DatasetId = *bqSourceDataset
	// dsr.ProjectId = *bqSourceProject

	// request := new(bigquery.QueryRequest)
	// request.DefaultDataset = dsr
	// request.Query = "SELECT count(*) FROM []"

	// call := bq.Jobs.Query("", request)

	// resp, err := call.Do()

	// jobs := new(bigquery.JobsService)
	// job := jobs.Query("sapient-catbird-547", request)
	// resp, err := job.Do()
	// fmt.Print(resp.CacheHit, resp.JobReference, err)

	// jobId := resp.JobReference.JobId

	// s, _ := bq.Jobs.GetQueryResults("", jobId).Do()

	// buf, _ := json.Marshal(s)
	// fmt.Println(s, string(buf), "\n\n\n")

	// Source table of the extract: the pre-grouped temp table in the
	// destination dataset/project.
	tabr := &bigquery.TableReference{
		DatasetId: *bqDestDataset,
		ProjectId: *bqDestProject,
		TableId:   "temp_grouped_v2",
	}

	// jcq := new(bigquery.JobConfigurationQuery)
	// jcq.DestinationTable = tabr
	// jcq.Priority = "BATCH"
	// jcq.WriteDisposition = "WRITE_TRUNCATE"
	// jcq.Query = "SELECT ap_mac, COUNT(DISTINCT(client_mac)), DATE(TIMESTAMP(first_seen)) date FROM [dev_sense_v1.sensev4_ct] GROUP BY ap_mac, date"

	// jc := new(bigquery.JobConfiguration)
	// jc.Query = jcq

	// job := new(bigquery.Job)
	// job.Configuration = jc

	// aa, err := bq.Jobs.Insert(*bqSourceProject, job).Do()
	// if err == nil {
	// 	fmt.Print(aa.Id)
	// } else {
	// 	fmt.Print(err)
	// }

	// Extract configuration: dump the table above to a CSV object in GCS.
	extractJob := &bigquery.Job{
		Configuration: &bigquery.JobConfiguration{
			Extract: &bigquery.JobConfigurationExtract{
				DestinationFormat: "csv",
				DestinationUri:    "gs://ct_temp/151028.csv",
				SourceTable:       tabr,
			},
		},
	}

	inserted, err := bq.Jobs.Insert(*bqSourceProject, extractJob).Do()
	if err != nil {
		fmt.Print(err)
		return
	}
	fmt.Print(inserted.Id)
}
Example #2
0
File: client.go  Project: gomaps/bigquery
// largeDataPagedQuery builds a query job with AllowLargeResults enabled and
// inserts it into the job queue. Results are written to the client's temp
// table (WRITE_TRUNCATE) and then read back, either all at once when the job
// completes immediately, or page by page via pageOverJob otherwise.
//
// When dataChan is non-nil, headers/rows (or errors) are streamed on it and
// the channel is closed before returning; accumulated rows are then not
// returned in the slice. When dataChan is nil, all rows are accumulated and
// returned. Returns (rows, headers, error).
func (c *Client) largeDataPagedQuery(service *bigquery.Service, pageSize int, dataset, project, queryStr string, dataChan chan Data) ([][]interface{}, []string, error) {
	c.printDebug("largeDataPagedQuery starting")
	ts := time.Now()

	// The query materializes into a temp table so AllowLargeResults is valid.
	tableRef := bigquery.TableReference{DatasetId: dataset, ProjectId: project, TableId: c.tempTableName}

	jobConfigQuery := bigquery.JobConfigurationQuery{
		AllowLargeResults: true,
		Query:             queryStr,
		DestinationTable:  &tableRef,
		DefaultDataset: &bigquery.DatasetReference{
			DatasetId: dataset,
			ProjectId: project,
		},
		WriteDisposition:  "WRITE_TRUNCATE",
		CreateDisposition: "CREATE_IF_NEEDED",
	}
	if !c.flattenResults {
		c.printDebug("setting FlattenResults to false")
		// FlattenResults is *bool: nil means "server default", so disabling
		// flattening requires a pointer to an explicit false.
		f := false
		jobConfigQuery.FlattenResults = &f
	}

	job := bigquery.Job{
		Configuration: &bigquery.JobConfiguration{Query: &jobConfigQuery},
	}

	runningJob, jerr := service.Jobs.Insert(project, &job).Do()
	if jerr != nil {
		c.printDebug("Error inserting job!", jerr)
		if dataChan != nil {
			dataChan <- Data{Err: jerr}
		}
		return nil, nil, jerr
	}

	qr, err := service.Jobs.GetQueryResults(project, runningJob.JobReference.JobId).Do()
	if err != nil {
		c.printDebug("Error loading query: ", err)
		if dataChan != nil {
			dataChan <- Data{Err: err}
		}
		return nil, nil, err
	}

	var headers []string
	rows := [][]interface{}{}

	// If the query already completed, process the first response directly;
	// otherwise stream subsequent pages from a background goroutine.
	if qr.JobComplete {
		c.printDebug("job complete, got rows", len(qr.Rows))
		headers, rows = c.headersAndRows(qr.Schema, qr.Rows)
		if dataChan != nil {
			dataChan <- Data{Headers: headers, Rows: rows}
		}
	} else {
		resultChan := make(chan [][]interface{})
		headersChan := make(chan []string)

		go c.pageOverJob(len(rows), runningJob.JobReference, qr.PageToken, resultChan, headersChan)

	L:
		for {
			select {
			case h, ok := <-headersChan:
				if ok {
					c.printDebug("got headers")
					headers = h
				}
			case newRows, ok := <-resultChan:
				// A closed resultChan signals the pager is done.
				if !ok {
					break L
				}
				if dataChan != nil {
					c.printDebug("got rows", len(newRows))
					dataChan <- Data{Headers: headers, Rows: newRows}
				} else {
					rows = append(rows, newRows...)
				}
			}
		}
	}

	if dataChan != nil {
		close(dataChan)
	}
	c.printDebug("largeDataPagedQuery completed in ", time.Since(ts).Seconds(), "s")
	return rows, headers, nil
}