diff --git a/bigquery/integration_test.go b/bigquery/integration_test.go index 3a749719432e..72ec14d4072a 100644 --- a/bigquery/integration_test.go +++ b/bigquery/integration_test.go @@ -2762,6 +2762,78 @@ func TestIntegration_ExtractExternal(t *testing.T) { } } +func TestIntegration_ExportDataStatistics(t *testing.T) { + // Create a table, extract it to GCS using EXPORT DATA statement. + if client == nil { + t.Skip("Integration tests skipped") + } + ctx := context.Background() + schema := Schema{ + {Name: "name", Type: StringFieldType}, + {Name: "num", Type: IntegerFieldType}, + } + table := newTable(t, schema) + defer table.Delete(ctx) + + // Extract to a GCS object as CSV. + bucketName := testutil.ProjID() + uri := fmt.Sprintf("gs://%s/bq-export-test-*.csv", bucketName) + defer func() { + it := storageClient.Bucket(bucketName).Objects(ctx, &storage.Query{ + MatchGlob: "bq-export-test-*.csv", + }) + for { + obj, err := it.Next() + if err == iterator.Done { + break + } + if err != nil { + t.Logf("failed to delete bucket: %v", err) + continue + } + err = storageClient.Bucket(bucketName).Object(obj.Name).Delete(ctx) + t.Logf("deleted object %s: %v", obj.Name, err) + } + }() + + // EXPORT DATA to GCS object. 
+	sql := fmt.Sprintf(`EXPORT DATA +		OPTIONS ( +			uri = '%s', +			format = 'CSV', +			overwrite = true, +			header = true, +			field_delimiter = ';' +		) +		AS ( +			SELECT 'a' as name, 1 as num +			UNION ALL +			SELECT 'b' as name, 2 as num +			UNION ALL +			SELECT 'c' as name, 3 as num +		);`, + uri) + stats, _, err := runQuerySQL(ctx, sql) + if err != nil { + t.Fatal(err) + } + + qStats, ok := stats.Details.(*QueryStatistics) + if !ok { + t.Fatalf("expected query statistics not present") + } + + if qStats.ExportDataStatistics == nil { + t.Fatal("jobStatus missing ExportDataStatistics") + } + if qStats.ExportDataStatistics.FileCount != 1 { + t.Fatalf("expected ExportDataStatistics to have 1 file, but got %d files", qStats.ExportDataStatistics.FileCount) + } + if qStats.ExportDataStatistics.RowCount != 3 { + t.Fatalf("expected ExportDataStatistics to have 3 rows, got %d rows", qStats.ExportDataStatistics.RowCount) + } +} + func TestIntegration_ReadNullIntoStruct(t *testing.T) { // Reading a null into a struct field should return an error (not panic). if client == nil { diff --git a/bigquery/job.go b/bigquery/job.go index 0ce79c111689..01796eea88df 100644 --- a/bigquery/job.go +++ b/bigquery/job.go @@ -505,6 +505,30 @@ type QueryStatistics struct { // The DDL target table, present only for CREATE/DROP FUNCTION/PROCEDURE queries. DDLTargetRoutine *Routine + + // Statistics for the EXPORT DATA statement as part of Query Job. + ExportDataStatistics *ExportDataStatistics +} + +// ExportDataStatistics represents statistics for +// an EXPORT DATA statement as part of Query Job. +type ExportDataStatistics struct { + // Number of destination files generated. + FileCount int64 + + // Number of destination rows generated. 
+ RowCount int64 +} + +func bqToExportDataStatistics(in *bq.ExportDataStatistics) *ExportDataStatistics { + if in == nil { + return nil + } + stats := &ExportDataStatistics{ + FileCount: in.FileCount, + RowCount: in.RowCount, + } + return stats } // BIEngineStatistics contains query statistics specific to the use of BI Engine. @@ -1028,6 +1052,7 @@ func (j *Job) setStatistics(s *bq.JobStatistics, c *Client) { DDLTargetTable: bqToTable(s.Query.DdlTargetTable, c), DDLOperationPerformed: s.Query.DdlOperationPerformed, DDLTargetRoutine: bqToRoutine(s.Query.DdlTargetRoutine, c), + ExportDataStatistics: bqToExportDataStatistics(s.Query.ExportDataStatistics), StatementType: s.Query.StatementType, TotalBytesBilled: s.Query.TotalBytesBilled, TotalBytesProcessed: s.Query.TotalBytesProcessed,