Skip to content

Commit

Permalink
Enforce mutual exclusivity among view, materialized view, and schema …
Browse files Browse the repository at this point in the history
…in BigQuery table config
  • Loading branch information
wj-chen committed Jul 31, 2023
1 parent ccbd669 commit 5a902d2
Show file tree
Hide file tree
Showing 2 changed files with 161 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -492,6 +492,7 @@ func ResourceBigQueryTable() *schema.Resource {
// Schema: Optional] The schema for the data.
// Schema is required for CSV and JSON formats if autodetect is not on.
// Schema is disallowed for Google Cloud Bigtable, Cloud Datastore backups, Avro, Iceberg, ORC, and Parquet formats.
// Schema is mutually exclusive with View and Materialized View.
"schema": {
Type: schema.TypeString,
Optional: true,
Expand All @@ -502,7 +503,8 @@ func ResourceBigQueryTable() *schema.Resource {
json, _ := structure.NormalizeJsonString(v)
return json
},
Description: `A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.`,
Description: `A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.`,
ConflictsWith: []string{"view", "materialized_view"},
},
// CsvOptions: [Optional] Additional properties to set if
// sourceFormat is set to CSV.
Expand Down Expand Up @@ -774,6 +776,7 @@ func ResourceBigQueryTable() *schema.Resource {
Description: `A JSON schema for the table.`,
},
// View: [Optional] If specified, configures this table as a view.
// View is mutually exclusive with Schema and Materialized View.
"view": {
Type: schema.TypeList,
Optional: true,
Expand All @@ -800,9 +803,11 @@ func ResourceBigQueryTable() *schema.Resource {
},
},
},
ConflictsWith: []string{"schema", "materialized_view"},
},

// Materialized View: [Optional] If specified, configures this table as a materialized view.
// Materialized View is mutually exclusive with Schema and View.
"materialized_view": {
Type: schema.TypeList,
Optional: true,
Expand Down Expand Up @@ -839,6 +844,7 @@ func ResourceBigQueryTable() *schema.Resource {
},
},
},
ConflictsWith: []string{"schema", "view"},
},

// TimePartitioning: [Experimental] If specified, configures time-based
Expand Down Expand Up @@ -1170,41 +1176,16 @@ func resourceBigQueryTableCreate(d *schema.ResourceData, meta interface{}) error

datasetID := d.Get("dataset_id").(string)

if table.View != nil && table.Schema != nil {
log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId)

log.Printf("[INFO] Removing schema from table definition because big query does not support setting schema on view creation")
schemaBack := table.Schema
table.Schema = nil

log.Printf("[INFO] Creating BigQuery table: %s without schema", table.TableReference.TableId)

res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
if err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been created", res.Id)
d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))

table.Schema = schemaBack
log.Printf("[INFO] Updating BigQuery table: %s with schema", table.TableReference.TableId)
if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, res.TableReference.TableId, table).Do(); err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been update with schema", res.Id)
} else {
log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId)

res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
if err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been created", res.Id)
d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))
res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
if err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been created", res.Id)
d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))

return resourceBigQueryTableRead(d, meta)
}

Expand Down
147 changes: 147 additions & 0 deletions mmv1/third_party/terraform/tests/resource_bigquery_table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -367,6 +367,7 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) {
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
Steps: []resource.TestStep{
{
<<<<<<< HEAD
Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description1"),
},
{
Expand All @@ -383,6 +384,10 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) {
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"deletion_protection"},
=======
Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description"),
ExpectError: regexp.MustCompile("\"view\": conflicts with schema"),
>>>>>>> 5471308bd (add ConflictsWith to resource_bigquery_table)
},
},
})
Expand Down Expand Up @@ -487,6 +492,51 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing
})
}

<<<<<<< HEAD
=======
// TestAccBigQueryTable_MaterializedView_WithSchema checks that a
// google_bigquery_table configured with a materialized_view block while also
// declaring an explicit schema is rejected by the provider's ConflictsWith
// validation (the step expects a plan-time error, so no API table is created).
func TestAccBigQueryTable_MaterializedView_WithSchema(t *testing.T) {
	t.Parallel()

	datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
	tableID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
	mViewID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
	query := fmt.Sprintf("SELECT some_int FROM `%s.%s`", datasetID, tableID)

	// Single step: applying the invalid config must fail with the
	// ConflictsWith error for materialized_view vs. schema.
	conflictStep := resource.TestStep{
		Config:      testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, mViewID, query),
		ExpectError: regexp.MustCompile("\"materialized_view\": conflicts with schema"),
	}

	VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps:                    []resource.TestStep{conflictStep},
	})
}

// TestAccBigQueryTable_MaterializedView_WithView checks that a
// google_bigquery_table declaring both a view block and a materialized_view
// block is rejected by the provider's ConflictsWith validation (the step
// expects a plan-time error, so no API table is created).
func TestAccBigQueryTable_MaterializedView_WithView(t *testing.T) {
	t.Parallel()

	datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
	tableID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
	mViewID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
	query := fmt.Sprintf("SELECT some_int FROM `%s.%s`", datasetID, tableID)

	// Single step: applying the invalid config must fail with the
	// ConflictsWith error for materialized_view vs. view.
	conflictStep := resource.TestStep{
		Config:      testAccBigQueryTableWithMatViewAndView(datasetID, tableID, mViewID, query),
		ExpectError: regexp.MustCompile("\"materialized_view\": conflicts with view"),
	}

	VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps:                    []resource.TestStep{conflictStep},
	})
}

>>>>>>> 5471308bd (add ConflictsWith to resource_bigquery_table)
func TestAccBigQueryExternalDataTable_parquet(t *testing.T) {
t.Parallel()

Expand Down Expand Up @@ -1554,6 +1604,103 @@ resource "google_bigquery_table" "mv_test" {
`, datasetID, tableID, mViewID, enable_refresh, refresh_interval, query)
}

func testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, mViewID, query string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
schema = <<EOH
[
{
"name": "some_int",
"type": "INTEGER"
}
]
EOH
}
resource "google_bigquery_table" "mv_test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
materialized_view {
enable_refresh = true
refresh_interval_ms = 360000
query = "%s"
}
schema = <<EOH
[
{
"description": "special new description with capital letter Z",
"name": "some_int",
"type": "INTEGER"
}
]
EOH
depends_on = [
google_bigquery_table.test,
]
}
`, datasetID, tableID, mViewID, query)
}

func testAccBigQueryTableWithMatViewAndView(datasetID, tableID, mViewID, query string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
schema = <<EOH
[
{
"name": "some_int",
"type": "INTEGER"
}
]
EOH
}
resource "google_bigquery_table" "mv_test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
view {
query = <<SQL
select "val1" as col1, "val2" as col2
SQL
use_legacy_sql = false
}
materialized_view {
enable_refresh = true
refresh_interval_ms = 360000
query = "%s"
}
depends_on = [
google_bigquery_table.test,
]
}
`, datasetID, tableID, mViewID, query)
}

func testAccBigQueryTableUpdated(datasetID, tableID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
Expand Down

0 comments on commit 5a902d2

Please sign in to comment.