// Query returns a slice of the results of a query.
func Query(proj, q string) ([][]bigquery.Value, error) {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, proj)
	if err != nil {
		return nil, err
	}
	query := client.Query(q)
	iter, err := query.Read(ctx)
	if err != nil {
		return nil, err
	}

	var rows [][]bigquery.Value
	for {
		var row []bigquery.Value
		err := iter.Next(&row)
		if err == iterator.Done {
			return rows, nil
		}
		if err != nil {
			return nil, err
		}
		rows = append(rows, row)
	}
}
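// The following is a minimal sketch of how the Query helper above might be
// called. printQuery is a hypothetical helper, and the project ID and SQL
// text are placeholders, not values taken from the original sample.
func printQuery() {
	rows, err := Query("my-project-id", "SELECT name, num FROM t1")
	if err != nil {
		log.Fatalf("Query: %v", err)
	}
	// Each row is a []bigquery.Value, one element per selected column.
	for _, row := range rows {
		fmt.Println(row)
	}
}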
func ExampleTable_LoaderFrom_reader() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	f, err := os.Open("data.csv")
	if err != nil {
		// TODO: Handle error.
	}
	rs := bigquery.NewReaderSource(f)
	rs.AllowJaggedRows = true
	// TODO: set other options on the GCSReference.

	ds := client.Dataset("my_dataset")
	loader := ds.Table("my_table").LoaderFrom(rs)
	loader.CreateDisposition = bigquery.CreateNever
	// TODO: set other options on the Loader.

	job, err := loader.Run(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	status, err := job.Wait(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	if status.Err() != nil {
		// TODO: Handle error.
	}
}
func ExampleRowIterator_Next_struct() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}

	type score struct {
		Name string
		Num  int
	}

	q := client.Query("select name, num from t1")
	it, err := q.Read(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	for {
		var s score
		err := it.Next(&s)
		if err == iterator.Done {
			break
		}
		if err != nil {
			// TODO: Handle error.
		}
		fmt.Println(s)
	}
}
func ExampleTable_LoaderFrom() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	gcsRef := bigquery.NewGCSReference("gs://my-bucket/my-object")
	gcsRef.AllowJaggedRows = true
	// TODO: set other options on the GCSReference.

	ds := client.Dataset("my_dataset")
	loader := ds.Table("my_table").LoaderFrom(gcsRef)
	loader.CreateDisposition = bigquery.CreateNever
	// TODO: set other options on the Loader.

	job, err := loader.Run(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	status, err := job.Wait(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	if status.Err() != nil {
		// TODO: Handle error.
	}
}
func ExampleTable_ExtractorTo() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	gcsRef := bigquery.NewGCSReference("gs://my-bucket/my-object")
	gcsRef.FieldDelimiter = ":"
	// TODO: set other options on the GCSReference.

	ds := client.Dataset("my_dataset")
	extractor := ds.Table("my_table").ExtractorTo(gcsRef)
	extractor.DisableHeader = true
	// TODO: set other options on the Extractor.

	job, err := extractor.Run(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	status, err := job.Wait(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	if status.Err() != nil {
		// TODO: Handle error.
	}
}
func main() { flag.Parse() flagsOk := true for _, f := range []string{"project", "dataset", "q"} { if flag.Lookup(f).Value.String() == "" { fmt.Fprintf(os.Stderr, "Flag --%s is required\n", f) flagsOk = false } } if !flagsOk { os.Exit(1) } ctx := context.Background() client, err := bigquery.NewClient(ctx, *project) if err != nil { log.Fatalf("Creating bigquery client: %v", err) } query := client.Query(*q) query.DefaultProjectID = *project query.DefaultDatasetID = *dataset query.WriteDisposition = bigquery.WriteTruncate if *dest != "" { query.Dst = client.Dataset(*dataset).Table(*dest) } // Query data. job, err := query.Run(ctx) if err != nil { log.Fatalf("Querying: %v", err) } fmt.Printf("Submitted query. Job ID: %s\n", job.ID()) if !*wait { return } fmt.Printf("Waiting for job to complete.\n") for range time.Tick(*pollint) { status, err := job.Status(ctx) if err != nil { fmt.Printf("Failure determining status: %v", err) break } if !status.Done() { continue } if err := status.Err(); err == nil { fmt.Printf("Success\n") } else { fmt.Printf("Failure: %+v\n", err) } break } }
func ExampleNewClient() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	_ = client // TODO: Use client.
}
func ExampleClient_DatasetInProject() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	ds := client.DatasetInProject("their-project-id", "their-dataset")
	fmt.Println(ds)
}
func ExampleTable_Uploader() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	u := client.Dataset("my_dataset").Table("my_table").Uploader()
	_ = u // TODO: Use u.
}
func ExampleClient_DatasetsInProject() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	it := client.DatasetsInProject(ctx, "their-project-id")
	_ = it // TODO: iterate using Next or iterator.Pager.
}
func ExampleTable_Read() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	it := client.Dataset("my_dataset").Table("my_table").Read(ctx)
	_ = it // TODO: iterate using Next or iterator.Pager.
}
func main() { flag.Parse() flagsOk := true if flag.Lookup("project").Value.String() == "" { fmt.Fprintf(os.Stderr, "Flag --project is required\n") flagsOk = false } var sourceFlagCount int if flag.Lookup("dataset").Value.String() != "" { sourceFlagCount++ } if flag.Lookup("jobid").Value.String() != "" { sourceFlagCount++ } if sourceFlagCount != 1 { fmt.Fprintf(os.Stderr, "Exactly one of --dataset or --jobid must be set\n") flagsOk = false } if !flagsOk { os.Exit(1) } ctx := context.Background() tableRE, err := regexp.Compile(*table) if err != nil { fmt.Fprintf(os.Stderr, "--table is not a valid regular expression: %q\n", *table) os.Exit(1) } client, err := bigquery.NewClient(ctx, *project) if err != nil { log.Fatalf("Creating bigquery client: %v", err) } if *jobID != "" { printQueryResults(ctx, client, *jobID) return } ds := client.Dataset(*dataset) tableIter := ds.Tables(context.Background()) for { t, err := tableIter.Next() if err == iterator.Done { break } if err != nil { log.Fatalf("Listing tables: %v", err) } if tableRE.MatchString(t.TableID) { printTable(ctx, client, t) } } }
func main() { flag.Parse() flagsOk := true for _, f := range []string{"project", "dataset", "table", "bucket", "object"} { if flag.Lookup(f).Value.String() == "" { fmt.Fprintf(os.Stderr, "Flag --%s is required\n", f) flagsOk = false } } if !flagsOk { os.Exit(1) } ctx := context.Background() client, err := bigquery.NewClient(ctx, *project) if err != nil { log.Fatalf("Creating bigquery client: %v", err) } table := client.Dataset(*dataset).Table(*table) gcs := bigquery.NewGCSReference(fmt.Sprintf("gs://%s/%s", *bucket, *object)) gcs.SkipLeadingRows = *skiprows gcs.MaxBadRecords = 1 gcs.AllowQuotedNewlines = true // Load data from Google Cloud Storage into a BigQuery table. loader := table.LoaderFrom(gcs) loader.WriteDisposition = bigquery.WriteTruncate job, err := loader.Run(ctx) if err != nil { log.Fatalf("Loading data: %v", err) } fmt.Printf("Job for data load operation: %+v\n", job) fmt.Printf("Waiting for job to complete.\n") for range time.Tick(*pollint) { status, err := job.Status(ctx) if err != nil { fmt.Printf("Failure determining status: %v", err) break } if !status.Done() { continue } if err := status.Err(); err == nil { fmt.Printf("Success\n") } else { fmt.Printf("Failure: %+v\n", err) } break } }
func ExampleTable_Delete() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	if err := client.Dataset("my_dataset").Table("my_table").Delete(ctx); err != nil {
		// TODO: Handle error.
	}
}
func ExampleDataset_Create() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	if err := client.Dataset("new-dataset").Create(ctx); err != nil {
		// TODO: Handle error.
	}
}
func ExampleTable_Uploader_options() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	u := client.Dataset("my_dataset").Table("my_table").Uploader()
	u.SkipInvalidRows = true
	u.IgnoreUnknownValues = true
	_ = u // TODO: Use u.
}
func ExampleDataset_Table() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	// Table creates a reference to the table. It does not create the actual
	// table in BigQuery; to do so, use Table.Create.
	t := client.Dataset("my_dataset").Table("my_table")
	fmt.Println(t)
}
func ExampleClient_Query() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	q := client.Query("select name, num from t1")
	q.DefaultProjectID = "project-id"
	// TODO: set other options on the Query.
	// TODO: Call Query.Run or Query.Read.
}
func main() { flag.Parse() flagsOk := true for _, f := range []string{"project", "dataset", "src1", "src2", "dest"} { if flag.Lookup(f).Value.String() == "" { fmt.Fprintf(os.Stderr, "Flag --%s is required\n", f) flagsOk = false } } if !flagsOk { os.Exit(1) } if *src1 == *src2 || *src1 == *dest || *src2 == *dest { log.Fatalf("Different values must be supplied for each of --src1, --src2 and --dest") } ctx := context.Background() client, err := bigquery.NewClient(ctx, *project) if err != nil { log.Fatalf("Creating bigquery client: %v", err) } s1 := client.Dataset(*dataset).Table(*src1) s2 := client.Dataset(*dataset).Table(*src2) d := client.Dataset(*dataset).Table(*dest) // Concatenate data. job, err := client.Copy(ctx, d, bigquery.Tables{s1, s2}, bigquery.WriteTruncate) if err != nil { log.Fatalf("Concatenating: %v", err) } fmt.Printf("Job for concatenation operation: %+v\n", job) fmt.Printf("Waiting for job to complete.\n") for range time.Tick(*pollint) { status, err := job.Status(ctx) if err != nil { fmt.Printf("Failure determining status: %v", err) break } if !status.Done() { continue } if err := status.Err(); err == nil { fmt.Printf("Success\n") } else { fmt.Printf("Failure: %+v\n", err) } break } }
func ExampleTable_Metadata() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	md, err := client.Dataset("my_dataset").Table("my_table").Metadata(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	fmt.Println(md)
}
func TestAll(t *testing.T) {
	tc := testutil.SystemTest(t)
	ctx := context.Background()

	client, err := bigquery.NewClient(ctx, tc.ProjectID)
	if err != nil {
		t.Fatal(err)
	}

	datasetID := fmt.Sprintf("golang_example_dataset_%d", time.Now().Unix())
	if err := createDataset(client, datasetID); err != nil {
		t.Errorf("failed to create dataset: %v", err)
	}
	if err := listDatasets(client); err != nil {
		t.Errorf("failed to list datasets: %v", err)
	}

	tableID := fmt.Sprintf("golang_example_table_%d", time.Now().Unix())
	if err := createTable(client, datasetID, tableID); err != nil {
		t.Errorf("failed to create table: %v", err)
	}
	buf := &bytes.Buffer{}
	if err := listTables(client, buf, datasetID); err != nil {
		t.Errorf("failed to list tables: %v", err)
	}
	if got := buf.String(); !strings.Contains(got, tableID) {
		t.Errorf("want table list %q to contain table %q", got, tableID)
	}
	if err := insertRows(client, datasetID, tableID); err != nil {
		t.Errorf("failed to insert rows: %v", err)
	}
	if err := listRows(client, datasetID, tableID); err != nil {
		t.Errorf("failed to list rows: %v", err)
	}
	if err := browseTable(client, datasetID, tableID); err != nil {
		t.Errorf("failed to browse table: %v", err)
	}
	if err := asyncQuery(client, datasetID, tableID); err != nil {
		t.Errorf("failed to async query: %v", err)
	}

	dstTableID := fmt.Sprintf("golang_example_tabledst_%d", time.Now().Unix())
	if err := copyTable(client, datasetID, tableID, dstTableID); err != nil {
		t.Errorf("failed to copy table: %v", err)
	}

	if err := deleteTable(client, datasetID, tableID); err != nil {
		t.Errorf("failed to delete table: %v", err)
	}
	if err := deleteTable(client, datasetID, dstTableID); err != nil {
		t.Errorf("failed to delete table: %v", err)
	}

	deleteDataset(t, ctx, datasetID)
}
func TestImportExport(t *testing.T) {
	tc := testutil.SystemTest(t)
	ctx := context.Background()

	client, err := bigquery.NewClient(ctx, tc.ProjectID)
	if err != nil {
		t.Fatal(err)
	}
	storageClient, err := storage.NewClient(ctx)
	if err != nil {
		t.Fatal(err)
	}

	datasetID := fmt.Sprintf("golang_example_dataset_importexport_%d", time.Now().Unix())
	tableID := fmt.Sprintf("golang_example_dataset_importexport_%d", time.Now().Unix())
	if err := createDataset(client, datasetID); err != nil {
		t.Errorf("failed to create dataset: %v", err)
	}
	schema := bigquery.Schema{
		&bigquery.FieldSchema{Name: "Year", Type: bigquery.IntegerFieldType},
		&bigquery.FieldSchema{Name: "City", Type: bigquery.StringFieldType},
	}
	if err := client.Dataset(datasetID).Table(tableID).Create(ctx, schema); err != nil {
		t.Errorf("failed to create table: %v", err)
	}
	defer deleteDataset(t, ctx, datasetID)

	if err := importFromFile(client, datasetID, tableID, "testdata/olympics.csv"); err != nil {
		t.Fatalf("failed to import from file: %v", err)
	}

	bucket := fmt.Sprintf("golang-example-bigquery-importexport-bucket-%d", time.Now().Unix())
	const object = "values.csv"
	if err := storageClient.Bucket(bucket).Create(ctx, tc.ProjectID, nil); err != nil {
		t.Fatalf("cannot create bucket: %v", err)
	}

	gcsURI := fmt.Sprintf("gs://%s/%s", bucket, object)
	if err := exportToGCS(client, datasetID, tableID, gcsURI); err != nil {
		t.Errorf("failed to export to %v: %v", gcsURI, err)
	}

	// Clean up the bucket and object.
	if err := storageClient.Bucket(bucket).Object(object).Delete(ctx); err != nil {
		t.Errorf("failed to cleanup the GCS object: %v", err)
	}
	time.Sleep(time.Second) // Give it a second, due to eventual consistency.
	if err := storageClient.Bucket(bucket).Delete(ctx); err != nil {
		t.Errorf("failed to cleanup the GCS bucket: %v", err)
	}
}
func ExampleQuery_Read() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	q := client.Query("select name, num from t1")
	it, err := q.Read(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	_ = it // TODO: iterate using Next or iterator.Pager.
}
func ExampleClient_Query_parameters() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	q := client.Query("select num from t1 where name = @user")
	q.Parameters = []bigquery.QueryParameter{
		{Name: "user", Value: "Elizabeth"},
	}
	// TODO: set other options on the Query.
	// TODO: Call Query.Run or Query.Read.
}
func ExampleClient_JobFromID() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	jobID := getJobID() // Get a job ID using Job.ID, the console or elsewhere.
	job, err := client.JobFromID(ctx, jobID)
	if err != nil {
		// TODO: Handle error.
	}
	fmt.Println(job)
}
func ExampleTable_Update() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	t := client.Dataset("my_dataset").Table("my_table")
	tm, err := t.Update(ctx, bigquery.TableMetadataToUpdate{
		Description: "my favorite table",
	})
	if err != nil {
		// TODO: Handle error.
	}
	fmt.Println(tm)
}
func ExampleTable_Create_schema() {
	ctx := context.Background()

	// Infer table schema from a Go type.
	schema, err := bigquery.InferSchema(Item{})
	if err != nil {
		// TODO: Handle error.
	}
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	t := client.Dataset("my_dataset").Table("new-table")
	if err := t.Create(ctx, schema); err != nil {
		// TODO: Handle error.
	}
}
func ExampleUploader_Put() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	u := client.Dataset("my_dataset").Table("my_table").Uploader()

	// Item implements the ValueSaver interface.
	items := []*Item{
		{Name: "n1", Size: 32.6, Count: 7},
		{Name: "n2", Size: 4, Count: 2},
		{Name: "n3", Size: 101.5, Count: 1},
	}
	if err := u.Put(ctx, items); err != nil {
		// TODO: Handle error.
	}
}
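// The examples above reference an Item type but do not define it. The
// following is a minimal sketch of what such a type might look like; the
// field set is inferred from the literal values in ExampleUploader_Put, and
// the Save implementation is an assumption, not code from the original.
type Item struct {
	Name  string
	Size  float64
	Count int
}

// Save implements bigquery.ValueSaver, mapping struct fields to column values.
func (i *Item) Save() (map[string]bigquery.Value, string, error) {
	return map[string]bigquery.Value{
		"Name":  i.Name,
		"Size":  i.Size,
		"Count": i.Count,
	}, "", nil // Leave the insert ID empty to have one generated.
}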
// query returns a row iterator over the results of a query against the
// publicdata.samples.shakespeare table.
func query(proj string) (*bigquery.RowIterator, error) {
	ctx := context.Background()

	client, err := bigquery.NewClient(ctx, proj)
	if err != nil {
		return nil, err
	}

	query := client.Query(
		`SELECT APPROX_TOP_COUNT(corpus, 10) as title, COUNT(*) as unique_words FROM ` +
			"`publicdata.samples.shakespeare`;")
	// Use standard SQL syntax for queries.
	// See: https://cloud.google.com/bigquery/sql-reference/
	query.QueryConfig.UseStandardSQL = true
	return query.Read(ctx)
}
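// The following is a minimal sketch of how the iterator returned by query
// might be consumed. printResults is a hypothetical helper, not part of the
// original sample.
func printResults(iter *bigquery.RowIterator) error {
	for {
		var row []bigquery.Value
		err := iter.Next(&row)
		if err == iterator.Done {
			return nil // All rows have been read.
		}
		if err != nil {
			return err
		}
		fmt.Println(row)
	}
}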
func ExampleTableIterator_Next() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "project-id")
	if err != nil {
		// TODO: Handle error.
	}
	it := client.Dataset("my_dataset").Tables(ctx)
	for {
		t, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			// TODO: Handle error.
		}
		fmt.Println(t)
	}
}