From 5a902d2801817c5841adb01a154beee3296f434e Mon Sep 17 00:00:00 2001 From: Weijia Chen Date: Fri, 2 Jun 2023 21:30:01 +0000 Subject: [PATCH] Enforce mutual exclusivity among view, materialized view, and schema in BigQuery table config --- .../bigquery/resource_bigquery_table.go | 47 ++---- .../tests/resource_bigquery_table_test.go | 147 ++++++++++++++++++ 2 files changed, 161 insertions(+), 33 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go index b95488961d3b..5c5ccb3e1675 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go @@ -492,6 +492,7 @@ func ResourceBigQueryTable() *schema.Resource { // Schema: Optional] The schema for the data. // Schema is required for CSV and JSON formats if autodetect is not on. // Schema is disallowed for Google Cloud Bigtable, Cloud Datastore backups, Avro, Iceberg, ORC, and Parquet formats. + // Schema is mutually exclusive with View and Materialized View. "schema": { Type: schema.TypeString, Optional: true, @@ -502,7 +503,8 @@ func ResourceBigQueryTable() *schema.Resource { json, _ := structure.NormalizeJsonString(v) return json }, - Description: `A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.`, + Description: `A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.`, + ConflictsWith: []string{"view", "materialized_view"}, }, // CsvOptions: [Optional] Additional properties to set if // sourceFormat is set to CSV. 
@@ -774,6 +776,7 @@ func ResourceBigQueryTable() *schema.Resource { Description: `A JSON schema for the table.`, }, // View: [Optional] If specified, configures this table as a view. + // View is mutually exclusive with Schema and Materialized View. "view": { Type: schema.TypeList, Optional: true, @@ -800,9 +803,11 @@ func ResourceBigQueryTable() *schema.Resource { }, }, }, + ConflictsWith: []string{"schema", "materialized_view"}, }, // Materialized View: [Optional] If specified, configures this table as a materialized view. + // Materialized View is mutually exclusive with Schema and View. "materialized_view": { Type: schema.TypeList, Optional: true, @@ -839,6 +844,7 @@ func ResourceBigQueryTable() *schema.Resource { }, }, }, + ConflictsWith: []string{"schema", "view"}, }, // TimePartitioning: [Experimental] If specified, configures time-based @@ -1170,41 +1176,16 @@ func resourceBigQueryTableCreate(d *schema.ResourceData, meta interface{}) error datasetID := d.Get("dataset_id").(string) - if table.View != nil && table.Schema != nil { + log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId) - log.Printf("[INFO] Removing schema from table definition because big query does not support setting schema on view creation") - schemaBack := table.Schema - table.Schema = nil - - log.Printf("[INFO] Creating BigQuery table: %s without schema", table.TableReference.TableId) - - res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do() - if err != nil { - return err - } - - log.Printf("[INFO] BigQuery table %s has been created", res.Id) - d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId)) - - table.Schema = schemaBack - log.Printf("[INFO] Updating BigQuery table: %s with schema", table.TableReference.TableId) - if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, res.TableReference.TableId, 
table).Do(); err != nil { - return err - } - - log.Printf("[INFO] BigQuery table %s has been update with schema", res.Id) - } else { - log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId) - - res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do() - if err != nil { - return err - } - - log.Printf("[INFO] BigQuery table %s has been created", res.Id) - d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId)) + res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do() + if err != nil { + return err } + log.Printf("[INFO] BigQuery table %s has been created", res.Id) + d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId)) + return resourceBigQueryTableRead(d, meta) } diff --git a/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go b/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go index 229d32af49d3..83ac57c0bdb4 100644 --- a/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go @@ -367,6 +367,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { +<<<<<<< HEAD Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description1"), }, { @@ -383,6 +384,10 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) { ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, +======= + Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description"), + ExpectError: regexp.MustCompile("\"view\": conflicts with schema"), +>>>>>>> 5471308bd (add ConflictsWith to resource_bigquery_table) }, }, }) @@ -487,6 
+492,48 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing }) } +func TestAccBigQueryTable_MaterializedView_WithSchema(t *testing.T) { + t.Parallel() + + datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10)) + tableID := fmt.Sprintf("tf_test_%s", RandString(t, 10)) + materializedViewID := fmt.Sprintf("tf_test_%s", RandString(t, 10)) + query := fmt.Sprintf("SELECT some_int FROM `%s.%s`", datasetID, tableID) + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, materializedViewID, query), + ExpectError: regexp.MustCompile("\"materialized_view\": conflicts with schema"), + }, + }, + }) +} + +func TestAccBigQueryTable_MaterializedView_WithView(t *testing.T) { + t.Parallel() + + datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10)) + tableID := fmt.Sprintf("tf_test_%s", RandString(t, 10)) + materializedViewID := fmt.Sprintf("tf_test_%s", RandString(t, 10)) + query := fmt.Sprintf("SELECT some_int FROM `%s.%s`", datasetID, tableID) + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableWithMatViewAndView(datasetID, tableID, materializedViewID, query), + ExpectError: regexp.MustCompile("\"materialized_view\": conflicts with view"), + }, + }, + }) +} + func TestAccBigQueryExternalDataTable_parquet(t *testing.T) { t.Parallel() @@ -1554,6 +1601,103 @@ resource "google_bigquery_table" "mv_test" { `, datasetID, tableID, mViewID, enable_refresh, refresh_interval, query) } +func 
testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, mViewID, query string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "%s" +} + +resource "google_bigquery_table" "test" { + deletion_protection = false + table_id = "%s" + dataset_id = google_bigquery_dataset.test.dataset_id + + schema = <