diff --git a/client/api.go b/client/api.go
index 099e978c..885bff01 100644
--- a/client/api.go
+++ b/client/api.go
@@ -73,6 +73,12 @@ func (c *Client) CreateSourceDataset(ctx context.Context, workspaceId string, da
 	return c.Meta.SaveSourceDataset(ctx, workspaceId, dataset, table)
 }
 
+// List all datasets, but ask only for id and name to avoid looping through expensive
+// resolvers
+func (c *Client) ListDatasetsIdNameOnly(ctx context.Context) ([]*meta.DatasetIdName, error) {
+	return c.Meta.ListDatasetsIdNameOnly(ctx)
+}
+
 // UpdateSourceDataset updates the existing source dataset
 func (c *Client) UpdateSourceDataset(ctx context.Context, workspaceId string, id string, dataset *meta.DatasetDefinitionInput, table *meta.SourceTableDefinitionInput) (*meta.Dataset, error) {
 	if !c.Flags[flagObs2110] {
@@ -811,6 +817,12 @@ func (c *Client) GetWorksheet(ctx context.Context, id string) (*meta.Worksheet,
 	return c.Meta.GetWorksheet(ctx, id)
 }
 
+// List all worksheets, but fetch only ids and labels to avoid using expensive
+// resolvers
+func (c *Client) ListWorksheetIdLabelOnly(ctx context.Context, workspaceId string) ([]*meta.WorksheetIdLabel, error) {
+	return c.Meta.ListWorksheetIdLabelOnly(ctx, workspaceId)
+}
+
 // UpdateWorksheet updates a worksheet
 // XXX: this should not have to take workspaceId, but API forces us to
 func (c *Client) UpdateWorksheet(ctx context.Context, id string, workspaceId string, input *meta.WorksheetInput) (*meta.Worksheet, error) {
@@ -1215,6 +1227,11 @@ func (c *Client) LookupUser(ctx context.Context, email string) (*meta.User, erro
 	return c.Meta.LookupUser(ctx, email)
 }
 
+// List all users in the current customer
+func (c *Client) ListUsers(ctx context.Context) ([]meta.User, error) {
+	return c.Meta.ListUsers(ctx)
+}
+
 // CreateRbacGroupmember creates an rbacgroupmember
 func (c *Client) CreateRbacGroupmember(ctx context.Context, input *meta.RbacGroupmemberInput) (*meta.RbacGroupmember, error) {
 	if !c.Flags[flagObs2110] {
diff --git a/client/binding/binding.go b/client/binding/binding.go
new file mode 100644
index 00000000..3518e345
--- /dev/null
+++ b/client/binding/binding.go
@@ -0,0 +1,317 @@
+package binding
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"regexp"
+	"sort"
+	"strings"
+
+	observe "github.com/observeinc/terraform-provider-observe/client"
+	"github.com/observeinc/terraform-provider-observe/client/meta/types"
+	"github.com/observeinc/terraform-provider-observe/client/oid"
+)
+
+var (
+	replaceInvalid = regexp.MustCompile(`([^0-9a-zA-Z-_]+)`)
+	hasLeadingDigit = regexp.MustCompile(`^[0-9]`)
+)
+
+type ResourceCacheEntry struct {
+	TfName string
+	Label string
+}
+
+type ResourceCache struct {
+	idToLabel map[Ref]ResourceCacheEntry
+	workspaceOid *oid.OID
+	workspaceEntry *ResourceCacheEntry
+}
+
+func NewResourceCache(ctx context.Context, kinds KindSet, client *observe.Client) (ResourceCache, error) {
+	var cache = ResourceCache{idToLabel: make(map[Ref]ResourceCacheEntry)}
+	// special case: one workspace per customer, always needed for lookup
+	workspaces, err := client.ListWorkspaces(ctx)
+	if err != nil {
+		return cache, err
+	}
+	cache.addEntry(KindWorkspace, workspaces[0].Label, workspaces[0].Oid().String(), nil, make(map[string]struct{}))
+	cache.workspaceOid = workspaces[0].Oid()
+	cache.workspaceEntry = cache.LookupId(KindWorkspace, cache.workspaceOid.String())
+
+	for resourceKind := range kinds {
+		// collisions are really bad, so make a best effort to prevent them
+		existingResourceNames := make(map[string]struct{})
+		disambiguator := 1
+		switch 
resourceKind {
+		case KindDataset:
+			datasets, err := client.ListDatasetsIdNameOnly(ctx)
+			if err != nil {
+				return cache, err
+			}
+			for _, ds := range datasets {
+				cache.addEntry(KindDataset, ds.Name, ds.Id, &disambiguator, existingResourceNames)
+			}
+		case KindWorksheet:
+			worksheets, err := client.ListWorksheetIdLabelOnly(ctx, cache.workspaceOid.Id)
+			if err != nil {
+				return cache, err
+			}
+			for _, wk := range worksheets {
+				cache.addEntry(KindWorksheet, wk.Label, wk.Id, &disambiguator, existingResourceNames)
+			}
+		case KindUser:
+			users, err := client.ListUsers(ctx)
+			if err != nil {
+				return cache, err
+			}
+			for _, user := range users {
+				cache.addEntry(KindUser, user.Label, user.Id.String(), &disambiguator, existingResourceNames)
+			}
+		}
+	}
+	return cache, nil
+}
+
+func (c *ResourceCache) addEntry(kind Kind, label string, id string, disambiguator *int, existingNames map[string]struct{}) {
+	resourceName := sanitizeIdentifier(label)
+	if _, found := existingNames[resourceName]; found {
+		resourceName = fmt.Sprintf("%s_%d", resourceName, *disambiguator)
+		*disambiguator++
+	}
+	var empty struct{}
+	existingNames[resourceName] = empty
+	c.idToLabel[Ref{kind: kind, key: id}] = ResourceCacheEntry{
+		TfName: resourceName,
+		Label: label,
+	}
+}
+
+func (c *ResourceCache) LookupId(kind Kind, id string) *ResourceCacheEntry {
+	maybeEnt, ok := c.idToLabel[Ref{kind: kind, key: id}]
+	if !ok {
+		return nil
+	}
+	return &maybeEnt
+}
+
+type Generator struct {
+	Enabled bool
+	resourceType string
+	resourceName string
+	enabledBindings KindSet
+	bindings Mapping
+	cache ResourceCache
+}
+
+func NewGenerator(ctx context.Context, enabled bool, resourceType string, resourceName string,
+	client *observe.Client, enabledBindings KindSet) (Generator, error) {
+	enabled = enabled && client.Config.ExportObjectBindings
+	if !enabled {
+		return Generator{Enabled: false}, nil
+	}
+	rc, err := NewResourceCache(ctx, enabledBindings, client)
+	if err != nil {
+		return Generator{}, err
+	}
+	bindings := NewMapping()
+	return Generator{
+		Enabled: true,
+		resourceType: resourceType,
+		resourceName: resourceName,
+		enabledBindings: enabledBindings,
+		bindings: bindings,
+		cache: rc,
+	}, nil
+}
+
+// Look up by kind and id; if valid and enabled, return a local variable reference,
+// otherwise return the id unchanged (no-op)
+func (g *Generator) TryBind(kind Kind, id string) string {
+	if !g.Enabled {
+		return id
+	}
+	var e *ResourceCacheEntry
+	if kind == KindWorkspace && id == g.cache.workspaceOid.String() {
+		// workspaces are special since there should only be one primary one
+		e = g.cache.workspaceEntry
+	} else {
+		// lookup
+		e = g.cache.LookupId(kind, id)
+		if e == nil {
+			return id
+		}
+	}
+	// process into local var ref
+	terraformLocal := g.fmtTfLocalVar(kind, e.TfName)
+	g.bindings[Ref{kind: kind, key: e.Label}] = Target{
+		TfName: e.TfName,
+		TfLocalBindingVar: terraformLocal,
+	}
+	return g.fmtTfLocalVarRef(terraformLocal)
+}
+
+func (g *Generator) Generate(data interface{}) {
+	mapOverJsonStringKeys(data, func(key string, value string, jsonMapNode map[string]interface{}) {
+		kinds := resolveKeyToKinds(key)
+		for _, kind := range kinds {
+			// if not enabled, skip
+			if _, found := g.enabledBindings[kind]; !found {
+				continue
+			}
+			// try to bind the name
+			maybeRef := g.TryBind(kind, value)
+			// if lookup succeeded, the returned value is a local variable ref and not the
+			// input id
+			if maybeRef != value {
+				jsonMapNode[key] = maybeRef
+				break
+			}
+		}
+	})
+}
+
+func (g *Generator) GenerateJson(jsonStr 
[]byte) ([]byte, error) { + if !g.Enabled { + return jsonStr, nil + } + serialized, err := transformJson(jsonStr, func(dataPtr *interface{}) error { + g.Generate(*dataPtr) + return nil + }) + if err != nil { + return nil, err + } + return serialized, nil +} + +func (g *Generator) InsertBindingsObject(data map[string]interface{}) error { + enabledList := make([]Kind, 0) + for binding := range g.enabledBindings { + enabledList = append(enabledList, binding) + } + // sort for stability of comparison later on + sort.Slice(enabledList, func(i int, j int) bool { + return string(enabledList[i]) < string(enabledList[j]) + }) + + workspaceTarget := g.cache.workspaceEntry + if workspaceTarget == nil { + return fmt.Errorf("Internal error: workspace was not resolved correctly.") + } + bindingsObject := BindingsObject{ + Mappings: g.bindings, + Kinds: enabledList, + Workspace: Target{ + TfLocalBindingVar: g.fmtTfLocalVar(KindWorkspace, workspaceTarget.TfName), + TfName: workspaceTarget.TfName, + }, + WorkspaceName: g.cache.workspaceEntry.Label, + } + data[bindingsKey] = bindingsObject + return nil +} + +func (g *Generator) InsertBindingsObjectJson(jsonData *types.JsonObject) (*types.JsonObject, error) { + if !g.Enabled { + return jsonData, nil + } + serialized, err := transformJson([]byte(jsonData.String()), func(dataPtr *interface{}) error { + return g.InsertBindingsObject((*dataPtr).(map[string]interface{})) + }) + if err != nil { + return nil, err + } + return types.JsonObject(serialized).Ptr(), nil +} + +func (g *Generator) fmtTfLocalVar(kind Kind, targetTfName string) string { + return sanitizeIdentifier(fmt.Sprintf("binding__%s_%s__%s_%s", g.resourceType, g.resourceName, kind, targetTfName)) +} + +func (g *Generator) fmtTfLocalVarRef(tfLocalVar string) string { + return fmt.Sprintf("${local.%s}", tfLocalVar) +} + +func resolveKeyToKinds(key string) []Kind { + switch key { + case "id": + return []Kind{KindDataset, KindWorksheet} + case "datasetId": + fallthrough + case "keyForDatasetId": + fallthrough + case "sourceDatasetId": + fallthrough + case "targetDataset": + fallthrough + case "dataset": + return []Kind{KindDataset} + case "workspaceId": + return []Kind{KindWorkspace} + case "userId": + return []Kind{KindUser} + default: + return []Kind{} + } +} + +func mapOverJsonStringKeys(data interface{}, f func(key string, value string, jsonMapNode map[string]interface{})) { + var stack []interface{} + stack = append(stack, data) + for len(stack) > 0 { + var cur interface{} + cur, stack = stack[len(stack)-1], stack[:len(stack)-1] + switch jsonNode := cur.(type) { + case map[string]interface{}: + for k, v := range jsonNode { + switch kvValue := v.(type) { + case string: + f(k, kvValue, jsonNode) + // if value looks like a composite type, push onto stack for further + // processing + case map[string]interface{}: + stack = append(stack, kvValue) + case []interface{}: + stack = append(stack, kvValue) + } + } + case []interface{}: + for _, object := range jsonNode { + stack = append(stack, object) + } + } + } +} + +func transformJson(data []byte, f func(data *interface{}) error) ([]byte, error) { + var deserialized interface{} + err := json.Unmarshal(data, &deserialized) + if err != nil { + return nil, fmt.Errorf("Failed to deserialize json: %w", err) + } + err = f(&deserialized) + if err != nil { + return nil, fmt.Errorf("Failed to transform json data: %w", err) + + } + serialized, err := json.Marshal(deserialized) + if err != nil { + return nil, fmt.Errorf("Failed to serialize json data: %w", err) + } + 
return serialized, nil +} + +func sanitizeIdentifier(name string) string { + path := strings.Split(name, "/") + + shortForm := strings.ToLower(path[len(path)-1]) + sanitized := replaceInvalid.ReplaceAllString(shortForm, "_") + + if hasLeadingDigit.MatchString(sanitized) { + sanitized = "_" + sanitized + } + + return sanitized +} diff --git a/client/binding/binding_test.go b/client/binding/binding_test.go new file mode 100644 index 00000000..16a831fa --- /dev/null +++ b/client/binding/binding_test.go @@ -0,0 +1,173 @@ +package binding + +import ( + "encoding/json" + "reflect" + "testing" + + "github.com/observeinc/terraform-provider-observe/client/meta/types" + "github.com/observeinc/terraform-provider-observe/client/oid" +) + +const ( + inputJson = ` + { + "bv": false, + "datasetId": "41000123", + "id": "41000123", + "iv": 1231231, + "nested_field": { + "dataset": "41000123", + "datasetId": "1231231", + "id": "41000201", + "sv": "1231231", + "targetDataset": "41000200" + }, + "sv": "41000121", + "userId": "41000100", + "workspaceId": "o:::workspace:41000001" + } + ` + expectedJson = ` + { + "bv": false, + "datasetId": "${local.binding__type_name__dataset_dataset_1}", + "id": "${local.binding__type_name__dataset_dataset_1}", + "iv": 1231231, + "nested_field": { + "dataset": "${local.binding__type_name__dataset_dataset_1}", + "datasetId": "1231231", + "id": "${local.binding__type_name__worksheet_worksheet_1}", + "sv": "1231231", + "targetDataset": "${local.binding__type_name__dataset_dataset_2}" + }, + "sv": "41000121", + "userId": "${local.binding__type_name__user_basic_user}", + "workspaceId": "${local.binding__type_name__workspace_workspace_1}" + } + ` +) + +func prepareResourceCacheFixture() ResourceCache { + r := ResourceCache{ + idToLabel: make(map[Ref]ResourceCacheEntry), + workspaceOid: &oid.OID{Type: oid.TypeWorkspace, Id: "41000001"}, + workspaceEntry: &ResourceCacheEntry{ + TfName: "workspace_1", + Label: "Workspace 1", + }, + } + disambiguator := 1 + existingResourceNames := make(map[string]struct{}) + r.addEntry(KindDataset, "dataset_1", "41000123", &disambiguator, existingResourceNames) + r.addEntry(KindDataset, "dataset_2", "41000200", &disambiguator, existingResourceNames) + r.addEntry(KindWorkspace, "default", "41000101", &disambiguator, existingResourceNames) + r.addEntry(KindWorksheet, "worksheet_1", "41000201", &disambiguator, existingResourceNames) + r.addEntry(KindUser, "basic_user", "41000100", &disambiguator, existingResourceNames) + return r +} + +func prepareGeneratorFixture() Generator { + return Generator{ + Enabled: true, + resourceName: "name", + resourceType: "type", + enabledBindings: NewKindSet(KindWorksheet, KindDataset, KindWorkspace, KindUser), + bindings: NewMapping(), + cache: prepareResourceCacheFixture(), + } +} + +func TestTryBind(t *testing.T) { + g := prepareGeneratorFixture() + binding := g.TryBind(KindDataset, "41000123") + expectedBinding := "${local.binding__type_name__dataset_dataset_1}" + if binding != expectedBinding { + t.Fatalf("expected binding %s, got actual binding %s", expectedBinding, binding) + } + binding = g.TryBind(KindDataset, "not_a_dataset_id") + expectedBinding = "not_a_dataset_id" + if binding != expectedBinding { + t.Fatalf("Expected no binding '%s', got binding %s", expectedBinding, binding) + } +} + +func TestGenerate(t *testing.T) { + var input map[string]interface{} + var expected map[string]interface{} + if err := json.Unmarshal([]byte(inputJson), &input); err != nil { + t.Fatal(err) + } + if err := 
json.Unmarshal([]byte(expectedJson), &expected); err != nil { + t.Fatal(err) + } + g := prepareGeneratorFixture() + g.Generate(input) + if !reflect.DeepEqual(input, expected) { + t.Fatalf("expected %#v, got %#v", expected, input) + } +} + +func TestGenerateJson(t *testing.T) { + g := prepareGeneratorFixture() + outputJson, err := g.GenerateJson([]byte(inputJson)) + if err != nil { + t.Fatal(err) + } + var expected map[string]interface{} + var output map[string]interface{} + if err := json.Unmarshal([]byte(expectedJson), &expected); err != nil { + t.Fatal(err) + } + if err := json.Unmarshal(outputJson, &output); err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(output, expected) { + t.Fatalf("expected %#v, got %#v", expected, output) + } +} + +func TestInsertBindingsObjectJson(t *testing.T) { + g := prepareGeneratorFixture() + g.bindings[Ref{kind: KindDataset, key: "dataset_1"}] = Target{ + TfLocalBindingVar: g.fmtTfLocalVar(KindDataset, "dataset_1"), + TfName: "dataset_1", + } + g.enabledBindings = NewKindSet(KindDataset, KindWorkspace) + jsonData := ` + { + "data_fld_1": "value" + } + ` + expected := map[string]interface{}{ + "data_fld_1": "value", + "bindings": map[string]interface{}{ + "mappings": map[string]interface{}{ + "dataset:dataset_1": map[string]interface{}{ + "tf_local_binding_var": "binding__type_name__dataset_dataset_1", + "tf_name": "dataset_1", + }, + }, + "kinds": []interface{}{ + "dataset", + "workspace", + }, + "workspace": map[string]interface{}{ + "tf_local_binding_var": "binding__type_name__workspace_workspace_1", + "tf_name": "workspace_1", + }, + "workspace_name": "Workspace 1", + }, + } + outputJson, err := g.InsertBindingsObjectJson((*types.JsonObject)(&jsonData)) + if err != nil { + t.Fatal(err) + } + output, err := outputJson.Map() + if err != nil { + t.Fatal(err) + } + if !reflect.DeepEqual(output, expected) { + t.Fatalf("expected %#v, got %#v", expected, output) + } +} diff --git a/client/binding/types.go b/client/binding/types.go new file mode 100644 index 00000000..55ef1cc3 --- /dev/null +++ b/client/binding/types.go @@ -0,0 +1,91 @@ +package binding + +import ( + "fmt" + "regexp" +) + +type Ref struct { + kind Kind + key string +} + +type Target struct { + TfLocalBindingVar string `json:"tf_local_binding_var"` + TfName string `json:"tf_name"` +} + +type Mapping map[Ref]Target + +type Kind string + +type KindSet map[Kind]struct{} + +type BindingsObject struct { + Mappings Mapping `json:"mappings"` + Kinds []Kind `json:"kinds"` + Workspace Target `json:"workspace"` + WorkspaceName string `json:"workspace_name"` +} + +const ( + KindDataset Kind = "dataset" + KindWorksheet Kind = "worksheet" + KindWorkspace Kind = "workspace" + KindUser Kind = "user" +) + +const ( + bindingsKey = "bindings" +) + +var bindingRefParseRegex = regexp.MustCompile(`(.*):(.*)`) + +var allKinds = NewKindSet( + KindDataset, + KindWorksheet, + KindWorkspace, + KindUser, +) + +func (r *Ref) String() string { + return fmt.Sprintf("%s:%s", r.kind, r.key) +} + +func (r Ref) MarshalText() (text []byte, err error) { + return []byte(r.String()), nil +} + +func (r *Ref) UnmarshalText(text []byte) error { + ref, ok := NewRefFromString(string(text)) + if !ok { + return fmt.Errorf("failed to unmarshal reference type") + } + *r = ref + return nil +} + +func NewRefFromString(s string) (Ref, bool) { + matches := bindingRefParseRegex.FindStringSubmatch(s) + if len(matches) == 0 { + return Ref{}, false + } + maybeKind := Kind(matches[1]) + if _, ok := allKinds[maybeKind]; !ok { + return Ref{}, 
false + } + return Ref{kind: maybeKind, key: matches[2]}, true +} + +func NewMapping() Mapping { + return make(Mapping) +} + +func NewKindSet(kinds ...Kind) KindSet { + set := make(KindSet) + var empty struct{} + for _, kind := range kinds { + set[kind] = empty + } + return set +} diff --git a/client/binding/types_test.go b/client/binding/types_test.go new file mode 100644 index 00000000..2de4569e --- /dev/null +++ b/client/binding/types_test.go @@ -0,0 +1,66 @@ +package binding + +import ( + "encoding/json" + "reflect" + "testing" +) + +func TestDeserializeBindingsObject(t *testing.T) { + jsonInput := ` + { + "kinds": [ + "dataset", + "workspace" + ], + "mappings": { + "dataset:Observe Dashboard": { + "tf_local_binding_var": "binding__dashboard_bindings_test_dashboard__dataset_observe_dashboard", + "tf_name": "observe_dashboard" + }, + "dataset:usage/Monitor Messages": { + "tf_local_binding_var": "binding__dashboard_bindings_test_dashboard__dataset_monitor_messages", + "tf_name": "monitor_messages" + } + }, + "workspace": { + "tf_local_binding_var": "binding__dashboard_bindings_test_dashboard__workspace_default", + "tf_name": "default" + }, + "workspace_name": "default" + } + ` + var bindingsObj BindingsObject + err := json.Unmarshal([]byte(jsonInput), &bindingsObj) + if err != nil { + t.Fatal(err) + } + expectedKinds := []Kind{KindDataset, KindWorkspace} + if !reflect.DeepEqual(expectedKinds, bindingsObj.Kinds) { + t.Fatalf("Expected %#v, got %#v", expectedKinds, bindingsObj.Kinds) + } + expectedMappings := Mapping{ + Ref{kind: KindDataset, key: "Observe Dashboard"}: Target{ + TfLocalBindingVar: "binding__dashboard_bindings_test_dashboard__dataset_observe_dashboard", + TfName: "observe_dashboard", + }, + Ref{kind: KindDataset, key: "usage/Monitor Messages"}: Target{ + TfLocalBindingVar: "binding__dashboard_bindings_test_dashboard__dataset_monitor_messages", + TfName: "monitor_messages", + }, + } + if !reflect.DeepEqual(expectedMappings, bindingsObj.Mappings) { + t.Fatalf("Expected %#v, got %#v", expectedMappings, bindingsObj.Mappings) + } + expectedWorkspace := Target{ + TfLocalBindingVar: "binding__dashboard_bindings_test_dashboard__workspace_default", + TfName: "default", + } + if !reflect.DeepEqual(expectedWorkspace, bindingsObj.Workspace) { + t.Fatalf("Expected %#v, got %#v", expectedWorkspace, bindingsObj.Workspace) + } + expectedName := "default" + if bindingsObj.WorkspaceName != expectedName { + t.Fatalf("Expected workspace_name %s, got %s", expectedName, bindingsObj.WorkspaceName) + } +} diff --git a/client/config.go b/client/config.go index 148e6f05..bf717f85 100644 --- a/client/config.go +++ b/client/config.go @@ -52,6 +52,9 @@ type Config struct { // optional traceparent identifier to pass via header TraceParent *string `json:"traceparent"` + + // enable extra queries needed to export bindings + ExportObjectBindings bool `json:"export_object_bindings"` } func (c *Config) Hash() uint64 { diff --git a/client/internal/meta/operation/dataset.graphql b/client/internal/meta/operation/dataset.graphql index 20de36f1..4f41985b 100644 --- a/client/internal/meta/operation/dataset.graphql +++ b/client/internal/meta/operation/dataset.graphql @@ -64,6 +64,11 @@ fragment Dataset on Dataset { } } +fragment DatasetIdName on Dataset { + name + id +} + # @genqlient(for: "DatasetInput.deleted", omitempty: true) # @genqlient(for: "DatasetInput.accelerationDisabled", omitempty: true) # @genqlient(for: "InputDefinitionInput.stageID", omitempty: true) @@ -123,6 +128,15 @@ query listDatasets{ } } 
+query listDatasetsIdNameOnly { + datasets: datasetSearch { + # @genqlient(flatten: true) + dataset { + ...DatasetIdName + } + } +} + # @genqlient(for: "DatasetFieldTypeInput.nullable", omitempty: true) # @genqlient(for: "DatasetInput.deleted", omitempty: true) # @genqlient(for: "DatasetInput.accelerationDisabled", omitempty: true) diff --git a/client/internal/meta/operation/user.graphql b/client/internal/meta/operation/user.graphql index ca1dfb23..0e85a9a1 100644 --- a/client/internal/meta/operation/user.graphql +++ b/client/internal/meta/operation/user.graphql @@ -1,7 +1,8 @@ fragment User on User { id email - comment + comment + label } query getUser($id: UserId!) { @@ -10,3 +11,12 @@ query getUser($id: UserId!) { ...User } } + +query listUsers { + users: currentCustomer { + # @genqlient(flatten: true) + users { + ...User + } + } +} diff --git a/client/internal/meta/operation/worksheet.graphql b/client/internal/meta/operation/worksheet.graphql index c58ff559..c0e60135 100644 --- a/client/internal/meta/operation/worksheet.graphql +++ b/client/internal/meta/operation/worksheet.graphql @@ -10,6 +10,11 @@ fragment Worksheet on Worksheet { } } +fragment WorksheetIdLabel on Worksheet { + id + label +} + # @genqlient(for: "InputDefinitionInput.stageID", omitempty: true) # @genqlient(for: "InputDefinitionInput.stageId", omitempty: true) # @genqlient(for: "StageQueryInput.stageID", omitempty: true) @@ -37,6 +42,17 @@ query getWorksheet($id: ObjectId!) { } } +query listWorksheetsIdLabelOnly($workspaceId: ObjectId!) { + worksheetSearch: worksheetSearch(terms: { workspaceId: [$workspaceId] }) { + worksheets { + # @genqlient(flatten: true) + worksheet { + ...WorksheetIdLabel + } + } + } +} + mutation deleteWorksheet($id: ObjectId!) { # @genqlient(flatten: true) resultStatus: deleteWorksheet(wks: $id) { diff --git a/client/meta/dataset.go b/client/meta/dataset.go index 4325b964..d84bff9c 100644 --- a/client/meta/dataset.go +++ b/client/meta/dataset.go @@ -69,6 +69,19 @@ func (client *Client) ListDatasets(ctx context.Context) (ds []*Dataset, err erro return result, nil } +func (client *Client) ListDatasetsIdNameOnly(ctx context.Context) (ds []*DatasetIdName, err error) { + resp, err := listDatasetsIdNameOnly(ctx, client.Gql) + if err != nil { + return nil, err + } + result := make([]*DatasetIdName, 0) + for _, ds := range resp.Datasets { + d := ds.Dataset + result = append(result, &d) + } + return result, nil +} + func (client *Client) SaveSourceDataset(ctx context.Context, workspaceId string, input *DatasetDefinitionInput, sourceInput *SourceTableDefinitionInput) (*Dataset, error) { resp, err := saveSourceDataset(ctx, client.Gql, workspaceId, *input, *sourceInput, dep()) return datasetOrError(resp.Dataset, err) diff --git a/client/meta/genqlient.generated.go b/client/meta/genqlient.generated.go index fe513b21..7791cc35 100644 --- a/client/meta/genqlient.generated.go +++ b/client/meta/genqlient.generated.go @@ -1426,6 +1426,18 @@ func (v *DatasetForeignKeysForeignKey) GetSrcFields() []string { return v.SrcFie // GetDstFields returns DatasetForeignKeysForeignKey.DstFields, and is useful for accessing the field via an interface. func (v *DatasetForeignKeysForeignKey) GetDstFields() []string { return v.DstFields } +// DatasetIdName includes the GraphQL fields of Dataset requested by the fragment DatasetIdName. +type DatasetIdName struct { + Name string `json:"name"` + Id string `json:"id"` +} + +// GetName returns DatasetIdName.Name, and is useful for accessing the field via an interface. 
+func (v *DatasetIdName) GetName() string { return v.Name } + +// GetId returns DatasetIdName.Id, and is useful for accessing the field via an interface. +func (v *DatasetIdName) GetId() string { return v.Id } + type DatasetInput struct { // if id is not specified, a new dataset is created Id *string `json:"id"` @@ -8018,6 +8030,7 @@ type User struct { Id types.UserIdScalar `json:"id"` Email string `json:"email"` Comment *string `json:"comment"` + Label string `json:"label"` } // GetId returns User.Id, and is useful for accessing the field via an interface. @@ -8029,6 +8042,9 @@ func (v *User) GetEmail() string { return v.Email } // GetComment returns User.Comment, and is useful for accessing the field via an interface. func (v *User) GetComment() *string { return v.Comment } +// GetLabel returns User.Label, and is useful for accessing the field via an interface. +func (v *User) GetLabel() string { return v.Label } + // These are the OPAL native types that can go into worksheet parameters. Some // of the native OPAL types aren't (currently?) exposed to the worksheet // parameters, but it's likely we will expand this to the full roster over time. @@ -8153,6 +8169,18 @@ func (v *Worksheet) GetWorkspaceId() string { return v.WorkspaceId } // GetStages returns Worksheet.Stages, and is useful for accessing the field via an interface. func (v *Worksheet) GetStages() []StageQuery { return v.Stages } +// WorksheetIdLabel includes the GraphQL fields of Worksheet requested by the fragment WorksheetIdLabel. +type WorksheetIdLabel struct { + Id string `json:"id"` + Label string `json:"label"` +} + +// GetId returns WorksheetIdLabel.Id, and is useful for accessing the field via an interface. +func (v *WorksheetIdLabel) GetId() string { return v.Id } + +// GetLabel returns WorksheetIdLabel.Label, and is useful for accessing the field via an interface. +func (v *WorksheetIdLabel) GetLabel() string { return v.Label } + type WorksheetInput struct { // if id is not specified, a new worksheet is created Id *string `json:"id"` @@ -9105,6 +9133,14 @@ type __getWorkspaceInput struct { // GetId returns __getWorkspaceInput.Id, and is useful for accessing the field via an interface. func (v *__getWorkspaceInput) GetId() string { return v.Id } +// __listWorksheetsIdLabelOnlyInput is used internally by genqlient +type __listWorksheetsIdLabelOnlyInput struct { + WorkspaceId string `json:"workspaceId"` +} + +// GetWorkspaceId returns __listWorksheetsIdLabelOnlyInput.WorkspaceId, and is useful for accessing the field via an interface. +func (v *__listWorksheetsIdLabelOnlyInput) GetWorkspaceId() string { return v.WorkspaceId } + // __lookupAppInput is used internally by genqlient type __lookupAppInput struct { WorkspaceId string `json:"workspaceId"` @@ -10888,6 +10924,27 @@ type listDatasetsDatasetsProject struct { // GetDatasets returns listDatasetsDatasetsProject.Datasets, and is useful for accessing the field via an interface. func (v *listDatasetsDatasetsProject) GetDatasets() []Dataset { return v.Datasets } +// listDatasetsIdNameOnlyDatasetsDatasetMatch includes the requested fields of the GraphQL type DatasetMatch. +type listDatasetsIdNameOnlyDatasetsDatasetMatch struct { + Dataset DatasetIdName `json:"dataset"` +} + +// GetDataset returns listDatasetsIdNameOnlyDatasetsDatasetMatch.Dataset, and is useful for accessing the field via an interface. 
+func (v *listDatasetsIdNameOnlyDatasetsDatasetMatch) GetDataset() DatasetIdName { return v.Dataset } + +// listDatasetsIdNameOnlyResponse is returned by listDatasetsIdNameOnly on success. +type listDatasetsIdNameOnlyResponse struct { + // searchMode defaults to InclusiveMode, which means "any matches, counts" sorted by better-scoring. + // If you pass in ExclusiveMode, then you get "must match each thing" behavior, which may end up + // returning no datasets at all quite easily. + Datasets []listDatasetsIdNameOnlyDatasetsDatasetMatch `json:"datasets"` +} + +// GetDatasets returns listDatasetsIdNameOnlyResponse.Datasets, and is useful for accessing the field via an interface. +func (v *listDatasetsIdNameOnlyResponse) GetDatasets() []listDatasetsIdNameOnlyDatasetsDatasetMatch { + return v.Datasets +} + // listDatasetsResponse is returned by listDatasets on success. type listDatasetsResponse struct { Datasets []listDatasetsDatasetsProject `json:"datasets"` @@ -10896,6 +10953,52 @@ type listDatasetsResponse struct { // GetDatasets returns listDatasetsResponse.Datasets, and is useful for accessing the field via an interface. func (v *listDatasetsResponse) GetDatasets() []listDatasetsDatasetsProject { return v.Datasets } +// listUsersResponse is returned by listUsers on success. +type listUsersResponse struct { + Users *listUsersUsersCustomer `json:"users"` +} + +// GetUsers returns listUsersResponse.Users, and is useful for accessing the field via an interface. +func (v *listUsersResponse) GetUsers() *listUsersUsersCustomer { return v.Users } + +// listUsersUsersCustomer includes the requested fields of the GraphQL type Customer. +type listUsersUsersCustomer struct { + Users []User `json:"users"` +} + +// GetUsers returns listUsersUsersCustomer.Users, and is useful for accessing the field via an interface. +func (v *listUsersUsersCustomer) GetUsers() []User { return v.Users } + +// listWorksheetsIdLabelOnlyResponse is returned by listWorksheetsIdLabelOnly on success. +type listWorksheetsIdLabelOnlyResponse struct { + WorksheetSearch listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapper `json:"worksheetSearch"` +} + +// GetWorksheetSearch returns listWorksheetsIdLabelOnlyResponse.WorksheetSearch, and is useful for accessing the field via an interface. +func (v *listWorksheetsIdLabelOnlyResponse) GetWorksheetSearch() listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapper { + return v.WorksheetSearch +} + +// listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapper includes the requested fields of the GraphQL type WorksheetSearchResultWrapper. +type listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapper struct { + Worksheets []listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapperWorksheetsWorksheetSearchResult `json:"worksheets"` +} + +// GetWorksheets returns listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapper.Worksheets, and is useful for accessing the field via an interface. +func (v *listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapper) GetWorksheets() []listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapperWorksheetsWorksheetSearchResult { + return v.Worksheets +} + +// listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapperWorksheetsWorksheetSearchResult includes the requested fields of the GraphQL type WorksheetSearchResult. 
+type listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapperWorksheetsWorksheetSearchResult struct { + Worksheet WorksheetIdLabel `json:"worksheet"` +} + +// GetWorksheet returns listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapperWorksheetsWorksheetSearchResult.Worksheet, and is useful for accessing the field via an interface. +func (v *listWorksheetsIdLabelOnlyWorksheetSearchWorksheetSearchResultWrapperWorksheetsWorksheetSearchResult) GetWorksheet() WorksheetIdLabel { + return v.Worksheet +} + // listWorkspacesResponse is returned by listWorkspaces on success. type listWorkspacesResponse struct { Workspaces []Workspace `json:"workspaces"` @@ -15008,6 +15111,7 @@ fragment User on User { id email comment + label } ` @@ -16831,6 +16935,7 @@ fragment User on User { id email comment + label } ` @@ -17067,6 +17172,125 @@ func listDatasets( return &data, err } +// The query or mutation executed by listDatasetsIdNameOnly. +const listDatasetsIdNameOnly_Operation = ` +query listDatasetsIdNameOnly { + datasets: datasetSearch { + dataset { + ... DatasetIdName + } + } +} +fragment DatasetIdName on Dataset { + name + id +} +` + +func listDatasetsIdNameOnly( + ctx context.Context, + client graphql.Client, +) (*listDatasetsIdNameOnlyResponse, error) { + req := &graphql.Request{ + OpName: "listDatasetsIdNameOnly", + Query: listDatasetsIdNameOnly_Operation, + } + var err error + + var data listDatasetsIdNameOnlyResponse + resp := &graphql.Response{Data: &data} + + err = client.MakeRequest( + ctx, + req, + resp, + ) + + return &data, err +} + +// The query or mutation executed by listUsers. +const listUsers_Operation = ` +query listUsers { + users: currentCustomer { + users { + ... User + } + } +} +fragment User on User { + id + email + comment + label +} +` + +func listUsers( + ctx context.Context, + client graphql.Client, +) (*listUsersResponse, error) { + req := &graphql.Request{ + OpName: "listUsers", + Query: listUsers_Operation, + } + var err error + + var data listUsersResponse + resp := &graphql.Response{Data: &data} + + err = client.MakeRequest( + ctx, + req, + resp, + ) + + return &data, err +} + +// The query or mutation executed by listWorksheetsIdLabelOnly. +const listWorksheetsIdLabelOnly_Operation = ` +query listWorksheetsIdLabelOnly ($workspaceId: ObjectId!) { + worksheetSearch(terms: {workspaceId:[$workspaceId]}) { + worksheets { + worksheet { + ... WorksheetIdLabel + } + } + } +} +fragment WorksheetIdLabel on Worksheet { + id + label +} +` + +func listWorksheetsIdLabelOnly( + ctx context.Context, + client graphql.Client, + workspaceId string, +) (*listWorksheetsIdLabelOnlyResponse, error) { + req := &graphql.Request{ + OpName: "listWorksheetsIdLabelOnly", + Query: listWorksheetsIdLabelOnly_Operation, + Variables: &__listWorksheetsIdLabelOnlyInput{ + WorkspaceId: workspaceId, + }, + } + var err error + + var data listWorksheetsIdLabelOnlyResponse + resp := &graphql.Response{Data: &data} + + err = client.MakeRequest( + ctx, + req, + resp, + ) + + return &data, err +} + // The query or mutation executed by listWorkspaces. 
const listWorkspaces_Operation = ` query listWorkspaces { diff --git a/client/meta/user.go b/client/meta/user.go index 9086fd36..75545641 100644 --- a/client/meta/user.go +++ b/client/meta/user.go @@ -43,6 +43,14 @@ func (client *Client) LookupUser(ctx context.Context, email string) (*User, erro return nil, fmt.Errorf("user not found") } +func (client *Client) ListUsers(ctx context.Context) ([]User, error) { + resp, err := listUsers(ctx, client.Gql) + if err != nil { + return nil, err + } + return resp.Users.Users, nil +} + func (u *User) Oid() *oid.OID { userOid := oid.UserOid(u.Id) return &userOid diff --git a/client/meta/worksheet.go b/client/meta/worksheet.go index 16d31f9a..179aceaa 100644 --- a/client/meta/worksheet.go +++ b/client/meta/worksheet.go @@ -31,6 +31,19 @@ func (client *Client) GetWorksheet(ctx context.Context, id string) (*Worksheet, return worksheetOrError(resp, err) } +func (client *Client) ListWorksheetIdLabelOnly(ctx context.Context, workspaceId string) ([]*WorksheetIdLabel, error) { + resp, err := listWorksheetsIdLabelOnly(ctx, client.Gql, workspaceId) + if err != nil { + return nil, err + } + result := make([]*WorksheetIdLabel, 0) + for _, wks := range resp.WorksheetSearch.Worksheets { + sheet := wks.Worksheet + result = append(result, &sheet) + } + return result, nil +} + func (client *Client) DeleteWorksheet(ctx context.Context, id string) error { resp, err := deleteWorksheet(ctx, client.Gql, id) return optionalResultStatusError(resp, err) diff --git a/docs/index.md b/docs/index.md index 18b721ed..d2e97c71 100644 --- a/docs/index.md +++ b/docs/index.md @@ -86,6 +86,7 @@ terraform plan - `api_token` (String, Sensitive) An Observe API Token. Used for authenticating requests to API in the absence of `user_email` and `user_password`. - `domain` (String) Observe API domain. Defaults to `observeinc.com`. +- `export_object_bindings` (Boolean) Enable generating object ID-name bindings for cross-tenant export/import (internal use). - `flags` (String) Toggle experimental features. - `http_client_timeout` (String) HTTP client timeout. Defaults to 2m. - `insecure` (Boolean) Skip TLS certificate validation. 
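For context, the new `export_object_bindings` argument documented above can be set in the provider block or via the `OBSERVE_EXPORT_OBJECT_BINDINGS` environment variable (see the schema change in observe/provider.go further down in this diff). A minimal provider configuration that turns it on might look like the following sketch, mirroring the preamble used by the acceptance tests below:

    provider "observe" {
      # when enabled, exported dashboard JSON carries ${local.binding__...} references
      # and a "bindings" object instead of raw object IDs
      export_object_bindings = true
    }
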
diff --git a/observe/data_source_dashboard.go b/observe/data_source_dashboard.go index b5b7589e..61fb7c90 100644 --- a/observe/data_source_dashboard.go +++ b/observe/data_source_dashboard.go @@ -82,5 +82,5 @@ func dataSourceDashboardRead(ctx context.Context, data *schema.ResourceData, met } data.SetId(ws.Id) - return dashboardToResourceData(ws, data) + return dashboardToResourceData(ctx, ws, data, client, true) } diff --git a/observe/data_source_dashboard_test.go b/observe/data_source_dashboard_test.go index 1f1bb969..9f87f072 100644 --- a/observe/data_source_dashboard_test.go +++ b/observe/data_source_dashboard_test.go @@ -1,11 +1,14 @@ package observe import ( + "encoding/json" "fmt" + "reflect" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/observeinc/terraform-provider-observe/client/binding" ) func TestAccObserveSourceDashboard(t *testing.T) { @@ -109,3 +112,234 @@ func TestAccObserveSourceDashboard_ExportNullParameter(t *testing.T) { }, }) } + +func TestAccObserveSourceDashboard_ExportWithBindings(t *testing.T) { + randomPrefix := acctest.RandomWithPrefix("tf") + // this is really nasty, but basically if the hashicorp terraform provider testing + // framework detects a terraform block, it will output the config verbatim instead of + // trying to insert another resource. their logic is literally `strings.Contains(s.Config, "terraform {")` + // (hashicorp/terraform-plugin-sdk/v2/helper/resource/teststep_providers.go:24), so + // there must be a space between the "terraform" and the "{" + providerPreamble := ` + terraform {} # trick the testing framework into not mangling our config + provider "observe" { + export_object_bindings = true + } + ` + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(providerPreamble+configPreamble+datastreamConfigPreamble+` + data "observe_oid" "dataset" { + oid = observe_datastream.test.dataset + } + + resource "observe_dashboard" "first" { + workspace = data.observe_workspace.default.oid + name = "%[1]s" + icon_url = "test" + layout = jsonencode({ + datasetId = data.observe_oid.dataset.id + }) + stages = <<-EOF + [{ + "pipeline": "filter field = \"cpu_usage_core_seconds\"\ncolmake cpu_used: value - lag(value, 1), groupby(clusterUid, namespace, podName, containerName)\ncolmake cpu_used: case(\n cpu_used < 0, value, // stream reset for cumulativeCounter metric\n true, cpu_used)\ncoldrop field, value", + "input": [{ + "inputName": "kubernetes/metrics/Container Metrics", + "inputRole": "Data", + "datasetId": "${data.observe_oid.dataset.id}" + }] + }] + EOF + + parameters = jsonencode([ + { + defaultValue = { + link = null + } + id = "emptylink" + name = "Empty Link" + valueKind = { + type = "LINK" + keyForDatasetId = data.observe_oid.dataset.id + } + }, + ]) + } + + data "observe_dashboard" "lookup" { + id = observe_dashboard.first.id + } + `, randomPrefix), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrWith("data.observe_dashboard.lookup", "layout", func(val string) error { + // check that we can deserialize a bindings object from the layout + // field + var bindings struct { + DatasetId string `json:"datasetId"` + Bindings binding.BindingsObject `json:"bindings"` + } + if err := json.Unmarshal([]byte(val), &bindings); err != nil { + return err + } + expectedKinds := []binding.Kind{binding.KindDataset, 
binding.KindWorkspace}
+						if !reflect.DeepEqual(bindings.Bindings.Kinds, expectedKinds) {
+							return fmt.Errorf("bindings.Kind does not match: Expected %#v, got %#v", expectedKinds, bindings.Bindings.Kinds)
+						}
+						expectedId := fmt.Sprintf("${local.binding__dashboard_%[1]s__dataset_%[1]s}", randomPrefix)
+						if bindings.DatasetId != expectedId {
+							return fmt.Errorf("layout.datasetId does not match: Expected %#v, got %#v", expectedId, bindings.DatasetId)
+						}
+						return nil
+					}),
+					resource.TestCheckResourceAttrWith("data.observe_dashboard.lookup", "stages", func(val string) error {
+						var stagesPartial []struct {
+							Input []struct {
+								DatasetId string `json:"datasetId"`
+							} `json:"input"`
+						}
+						if err := json.Unmarshal([]byte(val), &stagesPartial); err != nil {
+							return err
+						}
+						expectedId := fmt.Sprintf("${local.binding__dashboard_%[1]s__dataset_%[1]s}", randomPrefix)
+						actualId := stagesPartial[0].Input[0].DatasetId
+						if actualId != expectedId {
+							return fmt.Errorf("expected %#v, got %#v", expectedId, actualId)
+						}
+						return nil
+					}),
+					resource.TestCheckResourceAttrWith("data.observe_dashboard.lookup", "parameters", func(val string) error {
+						var parametersPartial []struct {
+							ValueKind struct {
+								KeyForDatasetId string `json:"keyForDatasetId"`
+							} `json:"valueKind"`
+						}
+						if err := json.Unmarshal([]byte(val), &parametersPartial); err != nil {
+							return err
+						}
+						expected_id := fmt.Sprintf("${local.binding__dashboard_%[1]s__dataset_%[1]s}", randomPrefix)
+						actual_id := parametersPartial[0].ValueKind.KeyForDatasetId
+						if actual_id != expected_id {
+							return fmt.Errorf("expected %#v, got %#v", expected_id, actual_id)
+						}
+						return nil
+					}),
+				),
+			},
+		},
+	})
+}
+
+func TestAccObserveSourceDashboard_ExportWithBindingsEmptyLayout(t *testing.T) {
+	randomPrefix := acctest.RandomWithPrefix("tf")
+	// this is really nasty, but basically if the hashicorp terraform provider testing
+	// framework detects a terraform block, it will output the config verbatim instead of
+	// trying to insert another resource. 
their logic is literally `strings.Contains(s.Config, "terraform {")`
+	// (hashicorp/terraform-plugin-sdk/v2/helper/resource/teststep_providers.go:24), so
+	// there must be a space between the "terraform" and the "{"
+	providerPreamble := `
+	terraform {} # trick the testing framework into not mangling our config
+	provider "observe" {
+		export_object_bindings = true
+	}
+	`
+	resource.Test(t, resource.TestCase{
+		PreCheck: func() { testAccPreCheck(t) },
+		Providers: testAccProviders,
+		Steps: []resource.TestStep{
+			{
+				Config: fmt.Sprintf(providerPreamble+configPreamble+datastreamConfigPreamble+`
+				data "observe_oid" "dataset" {
+					oid = observe_datastream.test.dataset
+				}
+
+				resource "observe_dashboard" "first" {
+					workspace = data.observe_workspace.default.oid
+					name = "%[1]s"
+					icon_url = "test"
+					stages = <<-EOF
+					[{
+						"pipeline": "filter field = \"cpu_usage_core_seconds\"\ncolmake cpu_used: value - lag(value, 1), groupby(clusterUid, namespace, podName, containerName)\ncolmake cpu_used: case(\n cpu_used < 0, value, // stream reset for cumulativeCounter metric\n true, cpu_used)\ncoldrop field, value",
+						"input": [{
+							"inputName": "kubernetes/metrics/Container Metrics",
+							"inputRole": "Data",
+							"datasetId": "${data.observe_oid.dataset.id}"
+						}]
+					}]
+					EOF
+
+					parameters = jsonencode([
+						{
+							defaultValue = {
+								link = null
+							}
+							id = "emptylink"
+							name = "Empty Link"
+							valueKind = {
+								type = "LINK"
+								keyForDatasetId = data.observe_oid.dataset.id
+							}
+						},
+					])
+				}
+
+				data "observe_dashboard" "lookup" {
+					id = observe_dashboard.first.id
+				}
+				`, randomPrefix),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttrWith("data.observe_dashboard.lookup", "layout", func(val string) error {
+						// check that we can deserialize a bindings object from the layout
+						// field
+						var bindings struct {
+							DatasetId string `json:"datasetId"`
+							Bindings binding.BindingsObject `json:"bindings"`
+						}
+						if err := json.Unmarshal([]byte(val), &bindings); err != nil {
+							return err
+						}
+						expectedKinds := []binding.Kind{binding.KindDataset, binding.KindWorkspace}
+						if !reflect.DeepEqual(bindings.Bindings.Kinds, expectedKinds) {
+							return fmt.Errorf("bindings.Kind does not match: Expected %#v, got %#v", expectedKinds, bindings.Bindings.Kinds)
+						}
+						return nil
+					}),
+					resource.TestCheckResourceAttrWith("data.observe_dashboard.lookup", "stages", func(val string) error {
+						var stagesPartial []struct {
+							Input []struct {
+								DatasetId string `json:"datasetId"`
+							} `json:"input"`
+						}
+						if err := json.Unmarshal([]byte(val), &stagesPartial); err != nil {
+							return err
+						}
+						expectedId := fmt.Sprintf("${local.binding__dashboard_%[1]s__dataset_%[1]s}", randomPrefix)
+						actualId := stagesPartial[0].Input[0].DatasetId
+						if actualId != expectedId {
+							return fmt.Errorf("expected %#v, got %#v", expectedId, actualId)
+						}
+						return nil
+					}),
+					resource.TestCheckResourceAttrWith("data.observe_dashboard.lookup", "parameters", func(val string) error {
+						var parametersPartial []struct {
+							ValueKind struct {
+								KeyForDatasetId string `json:"keyForDatasetId"`
+							} `json:"valueKind"`
+						}
+						if err := json.Unmarshal([]byte(val), &parametersPartial); err != nil {
+							return err
+						}
+						expected_id := fmt.Sprintf("${local.binding__dashboard_%[1]s__dataset_%[1]s}", randomPrefix)
+						actual_id := parametersPartial[0].ValueKind.KeyForDatasetId
+						if actual_id != expected_id {
+							return fmt.Errorf("expected %#v, got %#v", expected_id, actual_id)
+						}
+						return nil
+					}),
+				),
+			},
+		},
+	})
+}
diff --git a/observe/provider.go b/observe/provider.go 
index 65a721ec..4c09a07c 100644 --- a/observe/provider.go +++ b/observe/provider.go @@ -116,6 +116,12 @@ func Provider() *schema.Provider { Optional: true, Description: "ID of an Observe object that serves as the parent (managing) object for all resources created by the provider (internal use).", }, + "export_object_bindings": { + Type: schema.TypeBool, + DefaultFunc: schema.EnvDefaultFunc("OBSERVE_EXPORT_OBJECT_BINDINGS", false), + Optional: true, + Description: "Enable generating object ID-name bindings for cross-tenant export/import (internal use).", + }, }, DataSourcesMap: map[string]*schema.Resource{ @@ -262,6 +268,10 @@ func getConfigureContextFunc(userAgent func() string) schema.ConfigureContextFun config.ManagingObjectID = &managingId } + if v, ok := data.GetOk("export_object_bindings"); ok { + config.ExportObjectBindings = v.(bool) + } + // trace identifier to attach to all HTTP requests in the traceparent header // refer https://www.w3.org/TR/trace-context/#traceparent-header if traceparent := os.Getenv("TRACEPARENT"); traceparent != "" { diff --git a/observe/resource_dashboard.go b/observe/resource_dashboard.go index 4a113d10..0eb9a570 100644 --- a/observe/resource_dashboard.go +++ b/observe/resource_dashboard.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" observe "github.com/observeinc/terraform-provider-observe/client" + "github.com/observeinc/terraform-provider-observe/client/binding" gql "github.com/observeinc/terraform-provider-observe/client/meta" "github.com/observeinc/terraform-provider-observe/client/meta/types" "github.com/observeinc/terraform-provider-observe/client/oid" @@ -141,8 +142,15 @@ func newDashboardConfig(data *schema.ResourceData) (input *gql.DashboardInput, d return input, diags } -func dashboardToResourceData(d *gql.Dashboard, data *schema.ResourceData) (diags diag.Diagnostics) { - if err := data.Set("workspace", oid.WorkspaceOid(d.WorkspaceId).String()); err != nil { +func dashboardToResourceData(ctx context.Context, d *gql.Dashboard, data *schema.ResourceData, + client *observe.Client, genBindings bool) (diags diag.Diagnostics) { + bindFor := binding.NewKindSet(binding.KindDataset, binding.KindWorkspace) + gen, err := binding.NewGenerator(ctx, genBindings, "dashboard", d.Name, client, bindFor) + if err != nil { + return diag.Errorf("Failed to initialize binding generator: %s", err.Error()) + } + + if err := data.Set("workspace", gen.TryBind(binding.KindWorkspace, oid.WorkspaceOid(d.WorkspaceId).String())); err != nil { diags = append(diags, diag.FromErr(err)...) } @@ -176,21 +184,21 @@ func dashboardToResourceData(d *gql.Dashboard, data *schema.ResourceData) (diags if stagesRaw, err := json.Marshal(d.Stages); err != nil { diagErr := fmt.Errorf("failed to parse 'stages' response field: %w", err) diags = append(diags, diag.FromErr(diagErr)...) + } else if stagesRaw, err := gen.GenerateJson(stagesRaw); err != nil { + diagErr := fmt.Errorf("failed to generate bindings for 'stages' response field: %w", err) + diags = append(diags, diag.FromErr(diagErr)...) } else if err := data.Set("stages", string(stagesRaw)); err != nil { diags = append(diags, diag.FromErr(err)...) } } - if d.Layout != nil { - if err := data.Set("layout", d.Layout); err != nil { - diags = append(diags, diag.FromErr(err)...) - } - } - if d.Parameters != nil { if parametersRaw, err := json.Marshal(d.Parameters); err != nil { diagErr := fmt.Errorf("failed to parse 'parameters' response field: %w", err) diags = append(diags, diag.FromErr(diagErr)...) 
+ } else if parametersRaw, err := gen.GenerateJson(parametersRaw); err != nil { + diagErr := fmt.Errorf("failed to generate bindings for 'parameters' response field: %w", err) + diags = append(diags, diag.FromErr(diagErr)...) } else if err := data.Set("parameters", string(parametersRaw)); err != nil { diags = append(diags, diag.FromErr(err)...) } @@ -200,11 +208,36 @@ func dashboardToResourceData(d *gql.Dashboard, data *schema.ResourceData) (diags if parameterValuesRaw, err := json.Marshal(d.ParameterValues); err != nil { diagErr := fmt.Errorf("failed to parse 'parameter_values' response field: %w", err) diags = append(diags, diag.FromErr(diagErr)...) + } else if parameterValuesRaw, err := gen.GenerateJson(parameterValuesRaw); err != nil { + diagErr := fmt.Errorf("failed to generate bindings for 'parameterValuesRaw' response field: %w", err) + diags = append(diags, diag.FromErr(diagErr)...) } else if err := data.Set("parameter_values", string(parameterValuesRaw)); err != nil { diags = append(diags, diag.FromErr(err)...) } } + if d.Layout != nil || gen.Enabled { + if d.Layout == nil { + empty := types.JsonObject("{}") + d.Layout = &empty + } + if layout, err := d.Layout.MarshalJSON(); err != nil { + diagErr := fmt.Errorf("failed to parse 'layout' response field: %w", err) + diags = append(diags, diag.FromErr(diagErr)...) + } else if layout, err := gen.GenerateJson(layout); err != nil { + diagErr := fmt.Errorf("failed to generate bindings for 'layout' response field: %w", err) + diags = append(diags, diag.FromErr(diagErr)...) + } else { + layoutJson := types.JsonObject(string(layout)) + if layout, err := gen.InsertBindingsObjectJson(&layoutJson); err != nil { + diags = append(diags, diag.FromErr(err)...) + } else if err := data.Set("layout", layout); err != nil { + diags = append(diags, diag.FromErr(err)...) + } + } + + } + if err := data.Set("oid", d.Oid().String()); err != nil { diags = append(diags, diag.FromErr(err)...) } @@ -249,7 +282,7 @@ func resourceDashboardRead(ctx context.Context, data *schema.ResourceData, meta }) } - return dashboardToResourceData(result, data) + return dashboardToResourceData(ctx, result, data, client, false) } func resourceDashboardUpdate(ctx context.Context, data *schema.ResourceData, meta interface{}) (diags diag.Diagnostics) { @@ -270,7 +303,7 @@ func resourceDashboardUpdate(ctx context.Context, data *schema.ResourceData, met return diags } - return dashboardToResourceData(result, data) + return dashboardToResourceData(ctx, result, data, client, false) } func resourceDashboardDelete(ctx context.Context, data *schema.ResourceData, meta interface{}) (diags diag.Diagnostics) {