diff --git a/tests/bigquery/bigquery_integration_test.go b/tests/bigquery/bigquery_integration_test.go index 0c454228a8b6..3220b00859a8 100644 --- a/tests/bigquery/bigquery_integration_test.go +++ b/tests/bigquery/bigquery_integration_test.go @@ -75,6 +75,9 @@ func initBigQueryConnection(project string) (*bigqueryapi.Client, error) { func TestBigQueryToolEndpoints(t *testing.T) { sourceConfig := getBigQueryVars(t) + uniqueID := strings.ReplaceAll(uuid.New().String(), "-", "") + t.Logf("Starting test with uniqueID: %s", uniqueID) + ctx, cancel := context.WithTimeout(context.Background(), 20*time.Minute) defer cancel() @@ -86,8 +89,8 @@ func TestBigQueryToolEndpoints(t *testing.T) { } // create table name with UUID - datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", "")) - tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", "")) + datasetName := fmt.Sprintf("temp_toolbox_test_%s", uniqueID) + tableName := fmt.Sprintf("param_table_%s", uniqueID) tableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, datasetName, @@ -96,54 +99,54 @@ func TestBigQueryToolEndpoints(t *testing.T) { tableNameAuth := fmt.Sprintf("`%s.%s.auth_table_%s`", BigqueryProject, datasetName, - strings.ReplaceAll(uuid.New().String(), "-", ""), + uniqueID, ) tableNameTemplateParam := fmt.Sprintf("`%s.%s.template_param_table_%s`", BigqueryProject, datasetName, - strings.ReplaceAll(uuid.New().String(), "-", ""), + uniqueID, ) tableNameDataType := fmt.Sprintf("`%s.%s.datatype_table_%s`", BigqueryProject, datasetName, - strings.ReplaceAll(uuid.New().String(), "-", ""), + uniqueID, ) tableNameForecast := fmt.Sprintf("`%s.%s.forecast_table_%s`", BigqueryProject, datasetName, - strings.ReplaceAll(uuid.New().String(), "-", ""), + uniqueID, ) tableNameAnalyzeContribution := fmt.Sprintf("`%s.%s.analyze_contribution_table_%s`", BigqueryProject, datasetName, - strings.ReplaceAll(uuid.New().String(), "-", ""), + uniqueID, ) + 
// global cleanup for this test run + t.Cleanup(func() { + tests.CleanupBigQueryDatasets(t, context.Background(), client, []string{datasetName}) + }) + // set up data for param tool createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getBigQueryParamToolInfo(tableNameParam) - teardownTable1 := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams) - defer teardownTable1(t) + setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams) // set up data for auth tool createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getBigQueryAuthToolInfo(tableNameAuth) - teardownTable2 := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams) - defer teardownTable2(t) + setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams) // set up data for data type test tool createDataTypeTableStmt, insertDataTypeTableStmt, dataTypeToolStmt, arrayDataTypeToolStmt, dataTypeTestParams := getBigQueryDataTypeTestInfo(tableNameDataType) - teardownTable3 := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams) - defer teardownTable3(t) + setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams) // set up data for forecast tool createForecastTableStmt, insertForecastTableStmt, forecastTestParams := getBigQueryForecastToolInfo(tableNameForecast) - teardownTable4 := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams) - defer teardownTable4(t) + setupBigQueryTable(t, ctx, client, createForecastTableStmt, 
insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams) // set up data for analyze contribution tool createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, analyzeContributionTestParams := getBigQueryAnalyzeContributionToolInfo(tableNameAnalyzeContribution) - teardownTable5 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams) - defer teardownTable5(t) + setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams) // Write config into a file and pass it to command toolsFile := tests.GetToolsConfig(sourceConfig, BigqueryToolType, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt) @@ -205,6 +208,8 @@ func TestBigQueryToolEndpoints(t *testing.T) { } func TestBigQueryToolWithDatasetRestriction(t *testing.T) { + uniqueID := strings.ReplaceAll(uuid.New().String(), "-", "") + t.Logf("Starting restriction test with uniqueID: %s", uniqueID) ctx, cancel := context.WithTimeout(context.Background(), 4*time.Minute) defer cancel() @@ -213,11 +218,9 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) { t.Fatalf("unable to create BigQuery client: %s", err) } - // Create two datasets, one allowed, one not. 
- baseName := strings.ReplaceAll(uuid.New().String(), "-", "") - allowedDatasetName1 := fmt.Sprintf("allowed_dataset_1_%s", baseName) - allowedDatasetName2 := fmt.Sprintf("allowed_dataset_2_%s", baseName) - disallowedDatasetName := fmt.Sprintf("disallowed_dataset_%s", baseName) + allowedDatasetName1 := fmt.Sprintf("allowed_dataset_1_%s", uniqueID) + allowedDatasetName2 := fmt.Sprintf("allowed_dataset_2_%s", uniqueID) + disallowedDatasetName := fmt.Sprintf("disallowed_dataset_%s", uniqueID) allowedTableName1 := "allowed_table_1" allowedTableName2 := "allowed_table_2" disallowedTableName := "disallowed_table" @@ -228,56 +231,53 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) { allowedAnalyzeContributionTableName1 := "allowed_analyze_contribution_table_1" allowedAnalyzeContributionTableName2 := "allowed_analyze_contribution_table_2" disallowedAnalyzeContributionTableName := "disallowed_analyze_contribution_table" + + // global cleanup for this test run + t.Cleanup(func() { + tests.CleanupBigQueryDatasets(t, context.Background(), client, []string{allowedDatasetName1, allowedDatasetName2, disallowedDatasetName}) + }) + // Setup allowed table allowedTableNameParam1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedTableName1) createAllowedTableStmt1 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam1) - teardownAllowed1 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil) - defer teardownAllowed1(t) + setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil) allowedTableNameParam2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedTableName2) createAllowedTableStmt2 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam2) - teardownAllowed2 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil) - defer 
teardownAllowed2(t) + setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil) // Setup allowed forecast table allowedForecastTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedForecastTableName1) createForecastStmt1, insertForecastStmt1, forecastParams1 := getBigQueryForecastToolInfo(allowedForecastTableFullName1) - teardownAllowedForecast1 := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1) - defer teardownAllowedForecast1(t) + setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1) allowedForecastTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedForecastTableName2) createForecastStmt2, insertForecastStmt2, forecastParams2 := getBigQueryForecastToolInfo(allowedForecastTableFullName2) - teardownAllowedForecast2 := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2) - defer teardownAllowedForecast2(t) + setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2) // Setup disallowed table disallowedTableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedTableName) createDisallowedTableStmt := fmt.Sprintf("CREATE TABLE %s (id INT64)", disallowedTableNameParam) - teardownDisallowed := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil) - defer teardownDisallowed(t) + setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil) // Setup disallowed forecast table disallowedForecastTableFullName := 
fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedForecastTableName) createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedForecastParams := getBigQueryForecastToolInfo(disallowedForecastTableFullName) - teardownDisallowedForecast := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams) - defer teardownDisallowedForecast(t) + setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams) // Setup allowed analyze contribution table allowedAnalyzeContributionTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedAnalyzeContributionTableName1) createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, analyzeContributionParams1 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName1) - teardownAllowedAnalyzeContribution1 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1) - defer teardownAllowedAnalyzeContribution1(t) + setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1) allowedAnalyzeContributionTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedAnalyzeContributionTableName2) createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, analyzeContributionParams2 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName2) - teardownAllowedAnalyzeContribution2 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, 
allowedAnalyzeContributionTableFullName2, analyzeContributionParams2) - defer teardownAllowedAnalyzeContribution2(t) + setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2) // Setup disallowed analyze contribution table disallowedAnalyzeContributionTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedAnalyzeContributionTableName) createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedAnalyzeContributionParams := getBigQueryAnalyzeContributionToolInfo(disallowedAnalyzeContributionTableFullName) - teardownDisallowedAnalyzeContribution := setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams) - defer teardownDisallowedAnalyzeContribution(t) + setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams) // Configure source with dataset restriction. sourceConfig := getBigQueryVars(t) @@ -341,6 +341,7 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) { t.Fatalf("command initialization returned an error: %s", err) } defer cleanup() + defer cmd.Close() waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() @@ -350,6 +351,11 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) { t.Fatalf("toolbox didn't start successfully: %s", err) } + // FIX: Background goroutine to drain server logs and prevent pipe buffer deadlock. 
+ go func() { + _, _ = io.Copy(io.Discard, cmd.Out) + }() + // Run tests runListDatasetIdsWithRestriction(t, allowedDatasetName1, allowedDatasetName2) runListTableIdsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, allowedForecastTableName1, allowedAnalyzeContributionTableName1) @@ -410,6 +416,7 @@ func TestBigQueryWriteModeAllowed(t *testing.T) { t.Fatalf("command initialization returned an error: %s", err) } defer cleanup() + defer cmd.Close() waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() @@ -453,6 +460,7 @@ func TestBigQueryWriteModeBlocked(t *testing.T) { t.Fatalf("command initialization returned an error: %s", err) } defer cleanup() + defer cmd.Close() waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() @@ -515,6 +523,7 @@ func TestBigQueryWriteModeProtected(t *testing.T) { t.Fatalf("command initialization returned an error: %s", err) } defer cleanup() + defer cmd.Close() waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() diff --git a/tests/common.go b/tests/common.go index 7b44bf46a745..5b5e7d72a093 100644 --- a/tests/common.go +++ b/tests/common.go @@ -24,12 +24,14 @@ import ( "strings" "testing" + "cloud.google.com/go/bigquery" "github.com/google/go-cmp/cmp" "github.com/googleapis/genai-toolbox/internal/server" "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql" "github.com/googleapis/genai-toolbox/internal/testutils" "github.com/googleapis/genai-toolbox/internal/util/parameters" "github.com/jackc/pgx/v5/pgxpool" + "google.golang.org/api/iterator" ) // GetToolsConfig returns a mock tools config file @@ -1071,3 +1073,32 @@ func CleanupMSSQLTables(t *testing.T, ctx context.Context, pool *sql.DB) { } } + +func CleanupBigQueryDatasets(t *testing.T, ctx context.Context, client *bigquery.Client, datasetIDs []string) { + for _, id := range datasetIDs { + t.Logf("INTEGRATION CLEANUP: Purging dataset %s", id) + ds := client.Dataset(id) + + 
// Delete tables first since Dataset.Delete fails if not empty + tableIt := ds.Tables(ctx) + for { + table, err := tableIt.Next() + if err == iterator.Done { + break + } + if err != nil { + t.Errorf("INTEGRATION CLEANUP: Failed to iterate tables in %s: %v", id, err) + break + } + if err := table.Delete(ctx); err != nil { + t.Errorf("INTEGRATION CLEANUP: Failed to delete table %s: %v", table.TableID, err) + } + } + // Delete the now-empty dataset + if err := ds.Delete(ctx); err != nil { + t.Errorf("INTEGRATION CLEANUP: Failed to delete dataset %s: %v", id, err) + } else { + t.Logf("INTEGRATION CLEANUP SUCCESS: Wiped dataset %s", id) + } + } +}