Skip to content

Commit

Permalink
[Internal] Generate both SdkV2-compatible and Plugin Framework-compatible structures (#4332)
Browse files Browse the repository at this point in the history

## Changes
To make it possible to implement new resources following the Plugin
Framework's recommendation to use attributes while allowing resources to
be migrated from SDKv2 to the Plugin Framework without breaking, we will
create two copies of each code-generated structure in the Terraform
provider. The structures with no suffix should be used for new
resources. The structures with the `_SdkV2` suffix should be used for
resources in the plugin framework that are migrated from SDKv2.

## Tests
<!-- 
How is this tested? Please see the checklist below and also describe any
other relevant tests
-->

- [ ] `make test` run locally
- [ ] relevant change in `docs/` folder
- [ ] covered with integration tests in `internal/acceptance`
- [ ] using Go SDK
- [ ] using TF Plugin Framework
  • Loading branch information
mgyucht authored Dec 19, 2024
1 parent d2c6a4f commit cc758b8
Show file tree
Hide file tree
Showing 48 changed files with 148,080 additions and 6,893 deletions.
20 changes: 20 additions & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -1,20 +1,40 @@
internal/service/apps_tf/legacy_model.go linguist-generated=true
internal/service/apps_tf/model.go linguist-generated=true
internal/service/billing_tf/legacy_model.go linguist-generated=true
internal/service/billing_tf/model.go linguist-generated=true
internal/service/catalog_tf/legacy_model.go linguist-generated=true
internal/service/catalog_tf/model.go linguist-generated=true
internal/service/cleanrooms_tf/legacy_model.go linguist-generated=true
internal/service/cleanrooms_tf/model.go linguist-generated=true
internal/service/compute_tf/legacy_model.go linguist-generated=true
internal/service/compute_tf/model.go linguist-generated=true
internal/service/dashboards_tf/legacy_model.go linguist-generated=true
internal/service/dashboards_tf/model.go linguist-generated=true
internal/service/files_tf/legacy_model.go linguist-generated=true
internal/service/files_tf/model.go linguist-generated=true
internal/service/iam_tf/legacy_model.go linguist-generated=true
internal/service/iam_tf/model.go linguist-generated=true
internal/service/jobs_tf/legacy_model.go linguist-generated=true
internal/service/jobs_tf/model.go linguist-generated=true
internal/service/marketplace_tf/legacy_model.go linguist-generated=true
internal/service/marketplace_tf/model.go linguist-generated=true
internal/service/ml_tf/legacy_model.go linguist-generated=true
internal/service/ml_tf/model.go linguist-generated=true
internal/service/oauth2_tf/legacy_model.go linguist-generated=true
internal/service/oauth2_tf/model.go linguist-generated=true
internal/service/pipelines_tf/legacy_model.go linguist-generated=true
internal/service/pipelines_tf/model.go linguist-generated=true
internal/service/provisioning_tf/legacy_model.go linguist-generated=true
internal/service/provisioning_tf/model.go linguist-generated=true
internal/service/serving_tf/legacy_model.go linguist-generated=true
internal/service/serving_tf/model.go linguist-generated=true
internal/service/settings_tf/legacy_model.go linguist-generated=true
internal/service/settings_tf/model.go linguist-generated=true
internal/service/sharing_tf/legacy_model.go linguist-generated=true
internal/service/sharing_tf/model.go linguist-generated=true
internal/service/sql_tf/legacy_model.go linguist-generated=true
internal/service/sql_tf/model.go linguist-generated=true
internal/service/vectorsearch_tf/legacy_model.go linguist-generated=true
internal/service/vectorsearch_tf/model.go linguist-generated=true
internal/service/workspace_tf/legacy_model.go linguist-generated=true
internal/service/workspace_tf/model.go linguist-generated=true
9 changes: 6 additions & 3 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,10 @@ We are migrating the resource from SDKv2 to Plugin Framework provider and hence

### Adding a new resource
1. Check if the directory for this particular resource exists under `internal/providers/pluginfw/products`, if not create the directory eg: `cluster`, `volume` etc... Please note: Resources and Data sources are organized under the same package for that service.
2. Create a file with resource_resource-name.go and write the CRUD methods, schema for that resource. For reference, please take a look at existing resources eg: `resource_quality_monitor.go`. Make sure to set the user agent in all the CRUD methods. In the `Metadata()`, if the resource is to be used as default, use the method `GetDatabricksProductionName()` else use `GetDatabricksStagingName()` which suffixes the name with `_pluginframework`.
2. Create a file with resource_resource-name.go and write the CRUD methods, schema for that resource. For reference, please take a look at existing resources eg: `resource_app.go`.
- Make sure to set the user agent in all the CRUD methods.
- In the `Metadata()`, use the method `GetDatabricksProductionName()`.
- In the `Schema()` method, import the appropriate struct from the `internal/service/{package}_tf` package and use the `ResourceStructToSchema` method to convert the struct to schema. Use the struct that does not have the `_SdkV2` suffix.
3. Create a file with `resource_resource-name_acc_test.go` and add integration tests here.
4. Create a file with `resource_resource-name_test.go` and add unit tests here. Note: Please make sure to abstract specific method of the resource so they are unit test friendly and not testing internal part of terraform plugin framework library. You can compare the diagnostics, for example: please take a look at: `data_cluster_test.go`
5. Add the resource under `internal/providers/pluginfw/pluginfw.go` in `Resources()` method. Please update the list so that it stays in alphabetically sorted order.
Expand All @@ -139,9 +142,9 @@ There must not be any behaviour change or schema change when migrating a resourc
- Please make sure there are no breaking differences due to changes in schema by running: `make diff-schema`.
- Integration tests shouldn't require any major changes.
By default, `ResourceStructToSchema` will convert a `types.List` field to a `ListAttribute` or `ListNestedAttribute`. For resources or data sources migrated from the SDKv2, `ListNestedBlock` must be used for such fields. To do this, call `cs.ConfigureAsSdkV2Compatible()` in the `ResourceStructToSchema` callback:
By default, `ResourceStructToSchema` will convert a `types.List` field to a `ListAttribute` or `ListNestedAttribute`. For resources or data sources migrated from the SDKv2, `ListNestedBlock` must be used for such fields. To do this, use the `_SdkV2` variant from the `internal/service/{package}_tf` package when defining the resource schema and when interacting with the plan, config and state. Additionally, in the `Schema()` method, call `cs.ConfigureAsSdkV2Compatible()` in the `ResourceStructToSchema` callback:
```go
resp.Schema = tfschema.ResourceStructToSchema(ctx, Resource{}, func(c tfschema.CustomizableSchema) tfschema.CustomizableSchema {
resp.Schema = tfschema.ResourceStructToSchema(ctx, Resource_SdkV2{}, func(c tfschema.CustomizableSchema) tfschema.CustomizableSchema {
cs.ConfigureAsSdkV2Compatible()
// Add any additional configuration here
return cs
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ type FunctionsData struct {

func (FunctionsData) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
return map[string]reflect.Type{
"functions": reflect.TypeOf(catalog_tf.FunctionInfo{}),
"functions": reflect.TypeOf(catalog_tf.FunctionInfo_SdkV2{}),
}
}

Expand Down
4 changes: 2 additions & 2 deletions internal/providers/pluginfw/products/cluster/data_cluster.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ type ClusterInfo struct {

func (ClusterInfo) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
return map[string]reflect.Type{
"cluster_info": reflect.TypeOf(compute_tf.ClusterDetails{}),
"cluster_info": reflect.TypeOf(compute_tf.ClusterDetails_SdkV2{}),
}
}

Expand Down Expand Up @@ -85,7 +85,7 @@ func (d *ClusterDataSource) Read(ctx context.Context, req datasource.ReadRequest
return
}

var tfCluster compute_tf.ClusterDetails
var tfCluster compute_tf.ClusterDetails_SdkV2
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, cluster, &tfCluster)...)
if resp.Diagnostics.HasError() {
return
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ func readLibrary(ctx context.Context, w *databricks.WorkspaceClient, waitParams
}

type LibraryExtended struct {
compute_tf.Library
compute_tf.Library_SdkV2
ClusterId types.String `tfsdk:"cluster_id"`
ID types.String `tfsdk:"id" tf:"optional,computed"` // Adding ID field to stay compatible with SDKv2
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ func waitForMonitor(ctx context.Context, w *databricks.WorkspaceClient, monitor
}

type MonitorInfoExtended struct {
catalog_tf.MonitorInfo
catalog_tf.MonitorInfo_SdkV2
WarehouseId types.String `tfsdk:"warehouse_id" tf:"optional"`
SkipBuiltinDashboard types.Bool `tfsdk:"skip_builtin_dashboard" tf:"optional"`
ID types.String `tfsdk:"id" tf:"optional,computed"` // Adding ID field to stay compatible with SDKv2
Expand All @@ -65,7 +65,7 @@ type MonitorInfoExtended struct {
var _ pluginfwcommon.ComplexFieldTypeProvider = MonitorInfoExtended{}

func (m MonitorInfoExtended) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type {
return m.MonitorInfo.GetComplexFieldTypes(ctx)
return m.MonitorInfo_SdkV2.GetComplexFieldTypes(ctx)
}

type QualityMonitorResource struct {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ type RegisteredModelData struct {

func (RegisteredModelData) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
return map[string]reflect.Type{
"model_info": reflect.TypeOf(catalog_tf.RegisteredModelInfo{}),
"model_info": reflect.TypeOf(catalog_tf.RegisteredModelInfo_SdkV2{}),
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ type RegisteredModelVersionsData struct {

func (RegisteredModelVersionsData) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
return map[string]reflect.Type{
"model_versions": reflect.TypeOf(catalog_tf.ModelVersionInfo{}),
"model_versions": reflect.TypeOf(catalog_tf.ModelVersionInfo_SdkV2{}),
}
}

Expand Down Expand Up @@ -76,13 +76,13 @@ func (d *RegisteredModelVersionsDataSource) Read(ctx context.Context, req dataso
}
var tfModelVersions []attr.Value
for _, modelVersionSdk := range modelVersions.ModelVersions {
var modelVersion catalog_tf.ModelVersionInfo
var modelVersion catalog_tf.ModelVersionInfo_SdkV2
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, modelVersionSdk, &modelVersion)...)
if resp.Diagnostics.HasError() {
return
}
tfModelVersions = append(tfModelVersions, modelVersion.ToObjectValue(ctx))
}
registeredModelVersions.ModelVersions = types.ListValueMust(catalog_tf.ModelVersionInfo{}.Type(ctx), tfModelVersions)
registeredModelVersions.ModelVersions = types.ListValueMust(catalog_tf.ModelVersionInfo_SdkV2{}.Type(ctx), tfModelVersions)
resp.Diagnostics.Append(resp.State.Set(ctx, registeredModelVersions)...)
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ type ServingEndpointsData struct {

func (ServingEndpointsData) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
return map[string]reflect.Type{
"endpoints": reflect.TypeOf(serving_tf.ServingEndpoint{}),
"endpoints": reflect.TypeOf(serving_tf.ServingEndpoint_SdkV2{}),
}
}

Expand Down Expand Up @@ -77,13 +77,13 @@ func (d *ServingEndpointsDataSource) Read(ctx context.Context, req datasource.Re
}
tfEndpoints := []attr.Value{}
for _, endpoint := range endpointsInfoSdk {
var endpointsInfo serving_tf.ServingEndpoint
var endpointsInfo serving_tf.ServingEndpoint_SdkV2
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, endpoint, &endpointsInfo)...)
if resp.Diagnostics.HasError() {
return
}
tfEndpoints = append(tfEndpoints, endpointsInfo.ToObjectValue(ctx))
}
endpoints.Endpoints = types.ListValueMust(serving_tf.ServingEndpoint{}.Type(ctx), tfEndpoints)
endpoints.Endpoints = types.ListValueMust(serving_tf.ServingEndpoint_SdkV2{}.Type(ctx), tfEndpoints)
resp.Diagnostics.Append(resp.State.Set(ctx, endpoints)...)
}
22 changes: 12 additions & 10 deletions internal/providers/pluginfw/products/sharing/resource_share.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,13 @@ func ResourceShare() resource.Resource {
}

type ShareInfoExtended struct {
sharing_tf.ShareInfo
sharing_tf.ShareInfo_SdkV2
}

var _ pluginfwcommon.ComplexFieldTypeProvider = ShareInfoExtended{}

func (s ShareInfoExtended) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type {
return s.ShareInfo.GetComplexFieldTypes(ctx)
return s.ShareInfo_SdkV2.GetComplexFieldTypes(ctx)
}

func matchOrder[T any, K comparable](target, reference []T, keyFunc func(T) K) {
Expand Down Expand Up @@ -155,6 +155,8 @@ func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest,
c.SetRequired("object", "data_object_type")
c.SetRequired("object", "partition", "value", "op")
c.SetRequired("object", "partition", "value", "name")

c.SetOptional("owner")
return c
})
resp.Schema = schema.Schema{
Expand Down Expand Up @@ -408,36 +410,36 @@ func (r *ShareResource) Delete(ctx context.Context, req resource.DeleteRequest,
}

type effectiveFieldsAction interface {
resourceLevel(*ShareInfoExtended, sharing_tf.ShareInfo)
objectLevel(*sharing_tf.SharedDataObject, sharing_tf.SharedDataObject)
resourceLevel(*ShareInfoExtended, sharing_tf.ShareInfo_SdkV2)
objectLevel(*sharing_tf.SharedDataObject_SdkV2, sharing_tf.SharedDataObject_SdkV2)
}

type effectiveFieldsActionCreateOrUpdate struct{}

func (effectiveFieldsActionCreateOrUpdate) resourceLevel(state *ShareInfoExtended, plan sharing_tf.ShareInfo) {
func (effectiveFieldsActionCreateOrUpdate) resourceLevel(state *ShareInfoExtended, plan sharing_tf.ShareInfo_SdkV2) {
state.SyncEffectiveFieldsDuringCreateOrUpdate(plan)
}

func (effectiveFieldsActionCreateOrUpdate) objectLevel(state *sharing_tf.SharedDataObject, plan sharing_tf.SharedDataObject) {
func (effectiveFieldsActionCreateOrUpdate) objectLevel(state *sharing_tf.SharedDataObject_SdkV2, plan sharing_tf.SharedDataObject_SdkV2) {
state.SyncEffectiveFieldsDuringCreateOrUpdate(plan)
}

type effectiveFieldsActionRead struct{}

func (effectiveFieldsActionRead) resourceLevel(state *ShareInfoExtended, plan sharing_tf.ShareInfo) {
func (effectiveFieldsActionRead) resourceLevel(state *ShareInfoExtended, plan sharing_tf.ShareInfo_SdkV2) {
state.SyncEffectiveFieldsDuringRead(plan)
}

func (effectiveFieldsActionRead) objectLevel(state *sharing_tf.SharedDataObject, plan sharing_tf.SharedDataObject) {
func (effectiveFieldsActionRead) objectLevel(state *sharing_tf.SharedDataObject_SdkV2, plan sharing_tf.SharedDataObject_SdkV2) {
state.SyncEffectiveFieldsDuringRead(plan)
}

func (r *ShareResource) syncEffectiveFields(ctx context.Context, plan, state ShareInfoExtended, mode effectiveFieldsAction) (ShareInfoExtended, diag.Diagnostics) {
var d diag.Diagnostics
mode.resourceLevel(&state, plan.ShareInfo)
mode.resourceLevel(&state, plan.ShareInfo_SdkV2)
planObjects, _ := plan.GetObjects(ctx)
stateObjects, _ := state.GetObjects(ctx)
finalObjects := []sharing_tf.SharedDataObject{}
finalObjects := []sharing_tf.SharedDataObject_SdkV2{}
for i := range stateObjects {
mode.objectLevel(&stateObjects[i], planObjects[i])
finalObjects = append(finalObjects, stateObjects[i])
Expand Down
Loading

0 comments on commit cc758b8

Please sign in to comment.