diff --git a/internal/db/schema/migrations/oss/postgres/86/01_server_worker_local_storage_state.up.sql b/internal/db/schema/migrations/oss/postgres/86/01_server_worker_local_storage_state.up.sql index 03b5afc4b3..5c6e4372e0 100644 --- a/internal/db/schema/migrations/oss/postgres/86/01_server_worker_local_storage_state.up.sql +++ b/internal/db/schema/migrations/oss/postgres/86/01_server_worker_local_storage_state.up.sql @@ -36,6 +36,7 @@ alter table server_worker on update cascade; drop view server_worker_aggregate; +-- Updated in 99/01_split_worker_tag_tables.up.sql -- Replaces view created in 52/01_worker_operational_state.up.sql to add the worker local storage state create view server_worker_aggregate as with worker_config_tags(worker_id, source, tags) as ( diff --git a/internal/db/schema/migrations/oss/postgres/99/01_split_worker_tag_table.up.sql b/internal/db/schema/migrations/oss/postgres/99/01_split_worker_tag_table.up.sql new file mode 100644 index 0000000000..d2d34dd9c0 --- /dev/null +++ b/internal/db/schema/migrations/oss/postgres/99/01_split_worker_tag_table.up.sql @@ -0,0 +1,89 @@ +-- Copyright (c) HashiCorp, Inc. +-- SPDX-License-Identifier: BUSL-1.1 + +begin; + +-- Create new tables for worker config and worker api tags +create table server_worker_config_tag( + worker_id wt_public_id + constraint server_worker_fkey + references server_worker (public_id) + on delete cascade + on update cascade, + key wt_tagpair, + value wt_tagpair, + primary key (worker_id, key, value) +); +comment on table server_worker_config_tag is + 'server_worker_config_tag is a table where each row represents a worker config tag.'; + +create table server_worker_api_tag( + worker_id wt_public_id + constraint server_worker_fkey + references server_worker (public_id) + on delete cascade + on update cascade, + key wt_tagpair, + value wt_tagpair, + primary key (worker_id, key, value) +); +comment on table server_worker_api_tag is + 'server_worker_api_tag is a table where each row represents a worker api tag.'; + +-- Migrate from server_worker_tag to the new tables + insert into server_worker_config_tag + (worker_id, key, value) + select worker_id, key, value + from server_worker_tag + where source = 'configuration'; + + insert into server_worker_api_tag + (worker_id, key, value) + select worker_id, key, value + from server_worker_tag + where source = 'api'; + + +drop view server_worker_aggregate; +-- Replaces view created in 86/01_server_worker_local_storage_state.up.sql to use the disparate worker tag tables +-- View also switches to using json_agg to build the tags for consumption +-- TODO this view will be completely dropped in future PRs on this LLB in favor of sql in query.go +create view server_worker_aggregate as + with connection_count (worker_id, count) as ( + select worker_id, + count(1) as count + from session_connection + where closed_reason is null + group by worker_id +) + select w.public_id, + w.scope_id, + w.description, + w.name, + w.address, + w.create_time, + w.update_time, + w.version, + w.last_status_time, + w.type, + w.release_version, + w.operational_state, + w.local_storage_state, + cc.count as active_connection_count, + wt.tags as api_tags, + ct.tags as worker_config_tags + from server_worker w + left join (select worker_id, json_agg(json_build_object('key', key, 'value', value)) as tags from server_worker_api_tag group by worker_id) wt + on w.public_id = wt.worker_id + left join (select worker_id, json_agg(json_build_object('key', key, 'value', value)) as tags from 
server_worker_config_tag group by worker_id) ct + on w.public_id = ct.worker_id + left join connection_count as cc + on w.public_id = cc.worker_id; +comment on view server_worker_aggregate is + 'server_worker_aggregate contains the worker resource with its worker provided config values and its configuration and api provided tags.'; + +-- Drop the old tables +drop table server_worker_tag; +drop table server_worker_tag_enm; + +commit; \ No newline at end of file diff --git a/internal/db/schema/migrations/oss/postgres_99_01_test.go b/internal/db/schema/migrations/oss/postgres_99_01_test.go new file mode 100644 index 0000000000..cbbf7c1789 --- /dev/null +++ b/internal/db/schema/migrations/oss/postgres_99_01_test.go @@ -0,0 +1,126 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: BUSL-1.1 + +package oss_test + +import ( + "context" + "testing" + + "github.com/hashicorp/boundary/internal/db/common" + "github.com/hashicorp/boundary/internal/db/schema" + "github.com/hashicorp/boundary/testing/dbtest" + "github.com/stretchr/testify/require" +) + +const ( + makeWorkerQuery = "insert into server_worker (public_id, scope_id, type) values ($1, 'global', 'pki')" + insertWorkerTagsQuery = "insert into server_worker_tag (worker_id, key, value, source) values ($1, $2, $3, $4)" + selectWorkerApiTagsQuery = "select key, value from server_worker_api_tag where worker_id = $1" + selectWorkerConfigTagsQuery = "select key, value from server_worker_config_tag where worker_id = $1" +) + +type testWorkerTags struct { + Key string + Value string +} + +func Test_WorkerTagTableSplit(t *testing.T) { + t.Parallel() + require := require.New(t) + const priorMigration = 92001 + const serverEnumMigration = 99001 + dialect := dbtest.Postgres + ctx := context.Background() + + c, u, _, err := dbtest.StartUsingTemplate(dialect, dbtest.WithTemplate(dbtest.Template1)) + require.NoError(err) + t.Cleanup(func() { + require.NoError(c()) + }) + d, err := common.SqlOpen(dialect, u) + require.NoError(err) + + // migration to the prior migration (before the one we want to test) + m, err := schema.NewManager(ctx, schema.Dialect(dialect), d, schema.WithEditions( + schema.TestCreatePartialEditions(schema.Dialect(dialect), schema.PartialEditions{"oss": priorMigration}), + )) + require.NoError(err) + + _, err = m.ApplyMigrations(ctx) + require.NoError(err) + state, err := m.CurrentState(ctx) + require.NoError(err) + want := &schema.State{ + Initialized: true, + Editions: []schema.EditionState{ + { + Name: "oss", + BinarySchemaVersion: priorMigration, + DatabaseSchemaVersion: priorMigration, + DatabaseSchemaState: schema.Equal, + }, + }, + } + require.Equal(want, state) + + // Seed the data + execResult, err := d.ExecContext(ctx, makeWorkerQuery, "test-worker") + require.NoError(err) + rowsAffected, err := execResult.RowsAffected() + require.NoError(err) + require.Equal(int64(1), rowsAffected) + execResult, err = d.ExecContext(ctx, insertWorkerTagsQuery, "test-worker", "key1", "value1", "api") + require.NoError(err) + rowsAffected, err = execResult.RowsAffected() + require.NoError(err) + require.Equal(int64(1), rowsAffected) + execResult, err = d.ExecContext(ctx, insertWorkerTagsQuery, "test-worker", "key2", "value2", "configuration") + require.NoError(err) + rowsAffected, err = execResult.RowsAffected() + require.NoError(err) + require.Equal(int64(1), rowsAffected) + + // now we're ready for the migration we want to test. 
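Since 99/01 drops the legacy server_worker_tag and server_worker_tag_enm tables, the test could also assert that they are gone once ApplyMigrations runs just below. A minimal sketch of such a check, relying only on information_schema; the helper name is hypothetical and not part of this PR:

// legacyWorkerTagTableExists reports whether the pre-99/01 table is still
// present. Hypothetical helper; callable after the migration step below with
// the test's *sql.DB and context (imports: "context", "database/sql").
func legacyWorkerTagTableExists(ctx context.Context, d *sql.DB) (bool, error) {
	const q = `select exists (
		select 1 from information_schema.tables
		 where table_schema = 'public' and table_name = 'server_worker_tag')`
	var exists bool
	err := d.QueryRowContext(ctx, q).Scan(&exists)
	return exists, err
}

A caller would require.NoError on the error and require.False on the result after the 99/01 step applies.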
+ m, err = schema.NewManager(ctx, schema.Dialect(dialect), d, schema.WithEditions( + schema.TestCreatePartialEditions(schema.Dialect(dialect), schema.PartialEditions{"oss": serverEnumMigration}), + )) + require.NoError(err) + + _, err = m.ApplyMigrations(ctx) + require.NoError(err) + state, err = m.CurrentState(ctx) + require.NoError(err) + + want = &schema.State{ + Initialized: true, + Editions: []schema.EditionState{ + { + Name: "oss", + BinarySchemaVersion: serverEnumMigration, + DatabaseSchemaVersion: serverEnumMigration, + DatabaseSchemaState: schema.Equal, + }, + }, + } + require.Equal(want, state) + + // Check that the tags have been moved to the new tables + apiTags := new(testWorkerTags) + row := d.QueryRowContext(ctx, selectWorkerApiTagsQuery, "test-worker") + require.NoError(row.Scan( + &apiTags.Key, + &apiTags.Value, + )) + require.Equal("key1", apiTags.Key) + require.Equal("value1", apiTags.Value) + + configTags := new(testWorkerTags) + row = d.QueryRowContext(ctx, selectWorkerConfigTagsQuery, "test-worker") + require.NoError(row.Scan( + &configTags.Key, + &configTags.Value, + )) + require.Equal("key2", configTags.Key) + require.Equal("value2", configTags.Value) +} diff --git a/internal/proto/controller/storage/servers/store/v1/worker.proto b/internal/proto/controller/storage/servers/store/v1/worker.proto index 15e6040b52..ba36fe98a2 100644 --- a/internal/proto/controller/storage/servers/store/v1/worker.proto +++ b/internal/proto/controller/storage/servers/store/v1/worker.proto @@ -80,7 +80,10 @@ message Worker { // WorkerTag is a tag for a worker. The primary key is comprised of the // worker_id, key, value, and source. +// WorkerTag is deprecated- use ApiTag and ConfigTag instead. message WorkerTag { + option deprecated = true; + // worker_id is the public key that key of the worker this tag is for. // @inject_tag: `gorm:"primary_key"` string worker_id = 10; @@ -98,6 +101,38 @@ message WorkerTag { string source = 40; } +// ApiTag is an API tag for a worker. The primary key is comprised of the +// worker_id, key, and value +message ApiTag { + // worker_id is the public key that key of the worker this tag is for. + // @inject_tag: `gorm:"primary_key"` + string worker_id = 10; + + // key is the key of the tag. This must be set. + // @inject_tag: `gorm:"primary_key"` + string key = 20; + + // value is the value + // @inject_tag: `gorm:"primary_key"` + string value = 30; +} + +// ConfigTag is a configuration tag for a worker. The primary key is comprised of the +// worker_id, key, and value +message ConfigTag { + // worker_id is the public key that key of the worker this tag is for. + // @inject_tag: `gorm:"primary_key"` + string worker_id = 10; + + // key is the key of the tag. This must be set. + // @inject_tag: `gorm:"primary_key"` + string key = 20; + + // value is the value + // @inject_tag: `gorm:"primary_key"` + string value = 30; +} + // WorkerStorageBucketCredentialState is a state for a storage bucket credential for a worker. // The primary key is comprised of the worker_id, storage_bucket_credential_id, permission_type. message WorkerStorageBucketCredentialState { diff --git a/internal/server/query.go b/internal/server/query.go index d78953b0e5..446ba9f8f2 100644 --- a/internal/server/query.go +++ b/internal/server/query.go @@ -16,13 +16,17 @@ const ( deleteWhereCreateTimeSql = `create_time < ?` - deleteTagsByWorkerIdSql = ` + deleteApiTagsByWorkerIdSql = ` delete - from server_worker_tag - where - source = ? 
- and - worker_id = ?` + from server_worker_api_tag + where worker_id = ? + ` + + deleteConfigTagsByWorkerIdSql = ` + delete + from server_worker_config_tag + where worker_id = ? + ` deleteWorkerAuthQuery = ` delete from worker_auth_authorized diff --git a/internal/server/repository_worker.go b/internal/server/repository_worker.go index b51dbefcf1..dc807ff6e4 100644 --- a/internal/server/repository_worker.go +++ b/internal/server/repository_worker.go @@ -358,10 +358,10 @@ func (r *Repository) UpsertWorkerStatus(ctx context.Context, worker *Worker, opt // If we've been told to update tags, we need to clean out old // ones and add new ones. Within the current transaction, simply - // delete all tags for the given worker, then add the new ones + // delete all config tags for the given worker, then add the new ones // we've been sent. if opts.withUpdateTags { - if err := setWorkerTags(ctx, w, workerClone.GetPublicId(), ConfigurationTagSource, workerClone.inputTags); err != nil { + if err := setWorkerConfigTags(ctx, w, workerClone.GetPublicId(), workerClone.inputTags); err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("error setting worker tags")) } } @@ -385,21 +385,19 @@ func (r *Repository) UpsertWorkerStatus(ctx context.Context, worker *Worker, opt return ret, nil } -// setWorkerTags removes all existing tags from the same source and worker id +// setWorkerConfigTags removes all existing config tags from the same source and worker id // and creates new ones based on the ones provided. This function should be // called from inside a db transaction. // Workers/worker tags are intentionally not oplogged. -func setWorkerTags(ctx context.Context, w db.Writer, id string, ts TagSource, tags []*Tag) error { - const op = "server.setWorkerTags" +func setWorkerConfigTags(ctx context.Context, w db.Writer, id string, tags []*Tag) error { + const op = "server.setWorkerConfigTags" switch { - case !ts.isValid(): - return errors.New(ctx, errors.InvalidParameter, op, "invalid tag source provided") case id == "": return errors.New(ctx, errors.InvalidParameter, op, "worker id is empty") case isNil(w): return errors.New(ctx, errors.InvalidParameter, op, "db.Writer is nil") } - _, err := w.Exec(ctx, deleteTagsByWorkerIdSql, []any{ts.String(), id}) + _, err := w.Exec(ctx, deleteConfigTagsByWorkerIdSql, []any{id}) if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("couldn't delete existing tags for worker %q", id))) } @@ -409,16 +407,56 @@ func setWorkerTags(ctx context.Context, w db.Writer, id string, ts TagSource, ta // Otherwise, go through and stage each tuple for insertion // below. if len(tags) > 0 { - uTags := make([]*store.WorkerTag, 0, len(tags)) + uTags := make([]*store.ConfigTag, 0, len(tags)) for _, v := range tags { if v == nil { return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("found nil tag value for worker %s", id)) } - uTags = append(uTags, &store.WorkerTag{ + uTags = append(uTags, &store.ConfigTag{ + WorkerId: id, + Key: v.Key, + Value: v.Value, + }) + } + if err = w.CreateItems(ctx, uTags); err != nil { + return errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("error creating tags for worker %q", id))) + } + } + + return nil +} + +// setWorkerApiTags removes all existing API tags from the same source and worker id +// and creates new ones based on the ones provided. This function should be +// called from inside a db transaction. +// Workers/worker tags are intentionally not oplogged. 
+func setWorkerApiTags(ctx context.Context, w db.Writer, id string, tags []*Tag) error { + const op = "server.setWorkerApiTags" + switch { + case id == "": + return errors.New(ctx, errors.InvalidParameter, op, "worker id is empty") + case isNil(w): + return errors.New(ctx, errors.InvalidParameter, op, "db.Writer is nil") + } + _, err := w.Exec(ctx, deleteApiTagsByWorkerIdSql, []any{id}) + if err != nil { + return errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("couldn't delete existing tags for worker %q", id))) + } + + // If tags were cleared out entirely, then we'll have nothing + // to do here, e.g., it will result in deletion of all tags. + // Otherwise, go through and stage each tuple for insertion + // below. + if len(tags) > 0 { + uTags := make([]*store.ApiTag, 0, len(tags)) + for _, v := range tags { + if v == nil { + return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("found nil tag value for worker %s", id)) + } + uTags = append(uTags, &store.ApiTag{ WorkerId: id, Key: v.Key, Value: v.Value, - Source: ts.String(), }) } if err = w.CreateItems(ctx, uTags); err != nil { @@ -700,7 +738,7 @@ func (r *Repository) AddWorkerTags(ctx context.Context, workerId string, workerV if rowsUpdated != 1 { return errors.New(ctx, errors.MultipleRecords, op, fmt.Sprintf("updated worker version and %d rows updated", rowsUpdated)) } - err = setWorkerTags(ctx, w, workerId, ApiTagSource, newTags) + err = setWorkerApiTags(ctx, w, workerId, newTags) if err != nil { return errors.Wrap(ctx, err, op) } @@ -742,7 +780,7 @@ func (r *Repository) SetWorkerTags(ctx context.Context, workerId string, workerV if rowsUpdated != 1 { return errors.New(ctx, errors.MultipleRecords, op, fmt.Sprintf("updated worker version and %d rows updated", rowsUpdated)) } - err = setWorkerTags(ctx, w, workerId, ApiTagSource, tags) + err = setWorkerApiTags(ctx, w, workerId, tags) if err != nil { return errors.Wrap(ctx, err, op) } @@ -776,16 +814,15 @@ func (r *Repository) DeleteWorkerTags(ctx context.Context, workerId string, work } rowsDeleted := 0 - deleteTags := make([]*store.WorkerTag, 0, len(tags)) + deleteTags := make([]*store.ApiTag, 0, len(tags)) for _, t := range tags { if t == nil { return db.NoRowsAffected, errors.New(ctx, errors.InvalidParameter, op, "found nil tag value in input") } - deleteTags = append(deleteTags, &store.WorkerTag{ + deleteTags = append(deleteTags, &store.ApiTag{ WorkerId: workerId, Key: t.Key, Value: t.Value, - Source: ApiTagSource.String(), }) } diff --git a/internal/server/store/gorm.go b/internal/server/store/gorm.go index fde046e4cc..7867cd661b 100644 --- a/internal/server/store/gorm.go +++ b/internal/server/store/gorm.go @@ -8,7 +8,12 @@ func (*Controller) TableName() string { return "server_controller" } -// TableName overrides the table name used by WorkerTag to `worker_tag` -func (*WorkerTag) TableName() string { - return "server_worker_tag" +// TableName overrides the table name used by ApiTag +func (w *ApiTag) TableName() string { + return "server_worker_api_tag" +} + +// TableName overrides the table name used by ConfigTag +func (w *ConfigTag) TableName() string { + return "server_worker_config_tag" } diff --git a/internal/server/store/worker.pb.go b/internal/server/store/worker.pb.go index e833d88e51..ec900cc01f 100644 --- a/internal/server/store/worker.pb.go +++ b/internal/server/store/worker.pb.go @@ -204,6 +204,9 @@ func (x *Worker) GetLocalStorageState() string { // WorkerTag is a tag for a worker. The primary key is comprised of the // worker_id, key, value, and source. 
+// WorkerTag is deprecated- use ApiTag and ConfigTag instead. +// +// Deprecated: Marked as deprecated in controller/storage/servers/store/v1/worker.proto. type WorkerTag struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -283,6 +286,148 @@ func (x *WorkerTag) GetSource() string { return "" } +// ApiTag is an API tag for a worker. The primary key is comprised of the +// worker_id, key, and value +type ApiTag struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // worker_id is the public key that key of the worker this tag is for. + // @inject_tag: `gorm:"primary_key"` + WorkerId string `protobuf:"bytes,10,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty" gorm:"primary_key"` + // key is the key of the tag. This must be set. + // @inject_tag: `gorm:"primary_key"` + Key string `protobuf:"bytes,20,opt,name=key,proto3" json:"key,omitempty" gorm:"primary_key"` + // value is the value + // @inject_tag: `gorm:"primary_key"` + Value string `protobuf:"bytes,30,opt,name=value,proto3" json:"value,omitempty" gorm:"primary_key"` +} + +func (x *ApiTag) Reset() { + *x = ApiTag{} + if protoimpl.UnsafeEnabled { + mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ApiTag) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ApiTag) ProtoMessage() {} + +func (x *ApiTag) ProtoReflect() protoreflect.Message { + mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ApiTag.ProtoReflect.Descriptor instead. +func (*ApiTag) Descriptor() ([]byte, []int) { + return file_controller_storage_servers_store_v1_worker_proto_rawDescGZIP(), []int{2} +} + +func (x *ApiTag) GetWorkerId() string { + if x != nil { + return x.WorkerId + } + return "" +} + +func (x *ApiTag) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *ApiTag) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + +// ConfigTag is a configuration tag for a worker. The primary key is comprised of the +// worker_id, key, and value +type ConfigTag struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // worker_id is the public key that key of the worker this tag is for. + // @inject_tag: `gorm:"primary_key"` + WorkerId string `protobuf:"bytes,10,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty" gorm:"primary_key"` + // key is the key of the tag. This must be set. 
+ // @inject_tag: `gorm:"primary_key"` + Key string `protobuf:"bytes,20,opt,name=key,proto3" json:"key,omitempty" gorm:"primary_key"` + // value is the value + // @inject_tag: `gorm:"primary_key"` + Value string `protobuf:"bytes,30,opt,name=value,proto3" json:"value,omitempty" gorm:"primary_key"` +} + +func (x *ConfigTag) Reset() { + *x = ConfigTag{} + if protoimpl.UnsafeEnabled { + mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ConfigTag) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ConfigTag) ProtoMessage() {} + +func (x *ConfigTag) ProtoReflect() protoreflect.Message { + mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ConfigTag.ProtoReflect.Descriptor instead. +func (*ConfigTag) Descriptor() ([]byte, []int) { + return file_controller_storage_servers_store_v1_worker_proto_rawDescGZIP(), []int{3} +} + +func (x *ConfigTag) GetWorkerId() string { + if x != nil { + return x.WorkerId + } + return "" +} + +func (x *ConfigTag) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *ConfigTag) GetValue() string { + if x != nil { + return x.Value + } + return "" +} + // WorkerStorageBucketCredentialState is a state for a storage bucket credential for a worker. // The primary key is comprised of the worker_id, storage_bucket_credential_id, permission_type. type WorkerStorageBucketCredentialState struct { @@ -312,7 +457,7 @@ type WorkerStorageBucketCredentialState struct { func (x *WorkerStorageBucketCredentialState) Reset() { *x = WorkerStorageBucketCredentialState{} if protoimpl.UnsafeEnabled { - mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[2] + mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -325,7 +470,7 @@ func (x *WorkerStorageBucketCredentialState) String() string { func (*WorkerStorageBucketCredentialState) ProtoMessage() {} func (x *WorkerStorageBucketCredentialState) ProtoReflect() protoreflect.Message { - mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[2] + mi := &file_controller_storage_servers_store_v1_worker_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -338,7 +483,7 @@ func (x *WorkerStorageBucketCredentialState) ProtoReflect() protoreflect.Message // Deprecated: Use WorkerStorageBucketCredentialState.ProtoReflect.Descriptor instead. 
func (*WorkerStorageBucketCredentialState) Descriptor() ([]byte, []int) { - return file_controller_storage_servers_store_v1_worker_proto_rawDescGZIP(), []int{2} + return file_controller_storage_servers_store_v1_worker_proto_rawDescGZIP(), []int{4} } func (x *WorkerStorageBucketCredentialState) GetWorkerId() string { @@ -436,37 +581,47 @@ var file_controller_storage_servers_store_v1_worker_proto_rawDesc = []byte{ 0x12, 0x2f, 0x0a, 0x13, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0xa0, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x53, 0x74, 0x61, 0x74, - 0x65, 0x22, 0x68, 0x0a, 0x09, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x54, 0x61, 0x67, 0x12, 0x1b, + 0x65, 0x22, 0x6c, 0x0a, 0x09, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x54, 0x61, 0x67, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x28, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0xb1, 0x02, 0x0a, 0x22, - 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x42, 0x75, 0x63, - 0x6b, 0x65, 0x74, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, - 0x3f, 0x0a, 0x1c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x62, 0x75, 0x63, 0x6b, 0x65, - 0x74, 0x5f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, - 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x19, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x42, 0x75, - 0x63, 0x6b, 0x65, 0x74, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x49, 0x64, - 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x65, 0x72, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x70, 0x65, 0x72, 0x6d, 0x69, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x18, 0x28, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, - 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x18, 0x32, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x73, 0x12, 0x49, 0x0a, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x5f, - 0x61, 0x74, 0x18, 0x3c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x74, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x41, 0x74, 0x42, - 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, - 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, - 0x2f, 0x69, 
0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3b, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x02, 0x18, 0x01, 0x22, + 0x4d, 0x0a, 0x06, 0x41, 0x70, 0x69, 0x54, 0x61, 0x67, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, + 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x14, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x50, + 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x54, 0x61, 0x67, 0x12, 0x1b, 0x0a, 0x09, 0x77, + 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0xb1, 0x02, 0x0a, 0x22, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x53, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x61, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x77, 0x6f, 0x72, 0x6b, 0x65, + 0x72, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, + 0x65, 0x72, 0x49, 0x64, 0x12, 0x3f, 0x0a, 0x1c, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, + 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x19, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x61, 0x6c, 0x49, 0x64, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x65, 0x72, 0x6d, 0x69, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, + 0x70, 0x65, 0x72, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x28, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x32, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x49, 0x0a, 0x0a, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x3c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x76, 0x31, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x65, 0x64, 0x41, 0x74, 0x42, 0x3b, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 0x75, + 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3b, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x62, 
0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -481,18 +636,20 @@ func file_controller_storage_servers_store_v1_worker_proto_rawDescGZIP() []byte return file_controller_storage_servers_store_v1_worker_proto_rawDescData } -var file_controller_storage_servers_store_v1_worker_proto_msgTypes = make([]protoimpl.MessageInfo, 3) +var file_controller_storage_servers_store_v1_worker_proto_msgTypes = make([]protoimpl.MessageInfo, 5) var file_controller_storage_servers_store_v1_worker_proto_goTypes = []any{ (*Worker)(nil), // 0: controller.storage.servers.store.v1.Worker (*WorkerTag)(nil), // 1: controller.storage.servers.store.v1.WorkerTag - (*WorkerStorageBucketCredentialState)(nil), // 2: controller.storage.servers.store.v1.WorkerStorageBucketCredentialState - (*timestamp.Timestamp)(nil), // 3: controller.storage.timestamp.v1.Timestamp + (*ApiTag)(nil), // 2: controller.storage.servers.store.v1.ApiTag + (*ConfigTag)(nil), // 3: controller.storage.servers.store.v1.ConfigTag + (*WorkerStorageBucketCredentialState)(nil), // 4: controller.storage.servers.store.v1.WorkerStorageBucketCredentialState + (*timestamp.Timestamp)(nil), // 5: controller.storage.timestamp.v1.Timestamp } var file_controller_storage_servers_store_v1_worker_proto_depIdxs = []int32{ - 3, // 0: controller.storage.servers.store.v1.Worker.create_time:type_name -> controller.storage.timestamp.v1.Timestamp - 3, // 1: controller.storage.servers.store.v1.Worker.update_time:type_name -> controller.storage.timestamp.v1.Timestamp - 3, // 2: controller.storage.servers.store.v1.Worker.last_status_time:type_name -> controller.storage.timestamp.v1.Timestamp - 3, // 3: controller.storage.servers.store.v1.WorkerStorageBucketCredentialState.checked_at:type_name -> controller.storage.timestamp.v1.Timestamp + 5, // 0: controller.storage.servers.store.v1.Worker.create_time:type_name -> controller.storage.timestamp.v1.Timestamp + 5, // 1: controller.storage.servers.store.v1.Worker.update_time:type_name -> controller.storage.timestamp.v1.Timestamp + 5, // 2: controller.storage.servers.store.v1.Worker.last_status_time:type_name -> controller.storage.timestamp.v1.Timestamp + 5, // 3: controller.storage.servers.store.v1.WorkerStorageBucketCredentialState.checked_at:type_name -> controller.storage.timestamp.v1.Timestamp 4, // [4:4] is the sub-list for method output_type 4, // [4:4] is the sub-list for method input_type 4, // [4:4] is the sub-list for extension type_name @@ -531,6 +688,30 @@ func file_controller_storage_servers_store_v1_worker_proto_init() { } } file_controller_storage_servers_store_v1_worker_proto_msgTypes[2].Exporter = func(v any, i int) any { + switch v := v.(*ApiTag); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_controller_storage_servers_store_v1_worker_proto_msgTypes[3].Exporter = func(v any, i int) any { + switch v := v.(*ConfigTag); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_controller_storage_servers_store_v1_worker_proto_msgTypes[4].Exporter = func(v any, i int) any { switch v := v.(*WorkerStorageBucketCredentialState); i { case 0: return &v.state @@ -549,7 +730,7 @@ func file_controller_storage_servers_store_v1_worker_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_controller_storage_servers_store_v1_worker_proto_rawDesc, NumEnums: 0, - NumMessages: 3, + NumMessages: 5, NumExtensions: 0, NumServices: 0, }, 
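Together with the TableName overrides in store/gorm.go earlier in this diff, the generated ApiTag and ConfigTag types map straight onto the new tables, so callers no longer carry a source column. A sketch of how a caller inside a transaction might stage both kinds of tag with the same CreateItems pattern used above; the writeTags wrapper and its tag values are illustrative only:

// writeTags stages one API tag and one config tag for a worker using the
// db.Writer semantics seen in repository_worker.go. Illustrative sketch, not
// part of this PR.
func writeTags(ctx context.Context, w db.Writer, workerId string) error {
	apiTags := []*store.ApiTag{{WorkerId: workerId, Key: "env", Value: "prod"}}
	if err := w.CreateItems(ctx, apiTags); err != nil {
		return err
	}
	configTags := []*store.ConfigTag{{WorkerId: workerId, Key: "region", Value: "us-east-1"}}
	return w.CreateItems(ctx, configTags)
}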
diff --git a/internal/server/testing.go b/internal/server/testing.go index ae90e56898..91a910c0be 100644 --- a/internal/server/testing.go +++ b/internal/server/testing.go @@ -131,13 +131,12 @@ func TestKmsWorker(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ...O require.Equal(t, "kms", wrk.Type) if len(opts.withWorkerTags) > 0 { - var tags []*store.WorkerTag + var tags []*store.ConfigTag for _, t := range opts.withWorkerTags { - tags = append(tags, &store.WorkerTag{ + tags = append(tags, &store.ConfigTag{ WorkerId: wrk.GetPublicId(), Key: t.Key, Value: t.Value, - Source: "configuration", }) } require.NoError(t, rw.CreateItems(ctx, tags)) @@ -170,13 +169,12 @@ func TestPkiWorker(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ...O require.NotNil(t, wrk) if len(opts.withWorkerTags) > 0 { - var tags []*store.WorkerTag + var tags []*store.ConfigTag for _, t := range opts.withWorkerTags { - tags = append(tags, &store.WorkerTag{ + tags = append(tags, &store.ConfigTag{ WorkerId: wrk.GetPublicId(), Key: t.Key, Value: t.Value, - Source: "configuration", }) } require.NoError(t, rw.CreateItems(ctx, tags)) diff --git a/internal/server/worker.go b/internal/server/worker.go index 307f6343d6..8305d64869 100644 --- a/internal/server/worker.go +++ b/internal/server/worker.go @@ -5,7 +5,6 @@ package server import ( "context" - "strings" "github.com/fatih/structs" "github.com/hashicorp/boundary/internal/db/timestamp" @@ -115,8 +114,8 @@ type Worker struct { *store.Worker activeConnectionCount uint32 `gorm:"-"` - apiTags []*Tag `gorm:"-"` - configTags []*Tag `gorm:"-"` + apiTags Tags + configTags Tags // inputTags is not specified to be api or config tags and is not intended // to be read by clients. Since config tags and api tags are applied in @@ -260,13 +259,13 @@ type workerAggregate struct { Version uint32 Type string ReleaseVersion string - ApiTags string + ApiTags Tags ActiveConnectionCount uint32 OperationalState string LocalStorageState string // Config Fields LastStatusTime *timestamp.Timestamp - WorkerConfigTags string + WorkerConfigTags Tags } func (a *workerAggregate) toWorker(ctx context.Context) (*Worker, error) { @@ -289,48 +288,13 @@ func (a *workerAggregate) toWorker(ctx context.Context) (*Worker, error) { }, activeConnectionCount: a.ActiveConnectionCount, RemoteStorageStates: map[string]*plugin.StorageBucketCredentialState{}, + apiTags: a.ApiTags, + configTags: a.WorkerConfigTags, } - tags, err := tagsFromAggregatedTagString(ctx, a.ApiTags) - if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("error parsing config tag string")) - } - worker.apiTags = tags - - tags, err = tagsFromAggregatedTagString(ctx, a.WorkerConfigTags) - if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("error parsing config tag string")) - } - worker.configTags = tags return worker, nil } -// tagsForAggregatedTagString parses a deliminated string in the format returned -// by the database for the server_worker_aggregate view and returns []*Tag. -// The string is in the format of key1Yvalue1Zkey2Yvalue2Zkey3Yvalue3. Y and Z -// ares chosen for deliminators since tag keys and values are restricted from -// having capitalized letters in them. 
-func tagsFromAggregatedTagString(ctx context.Context, s string) ([]*Tag, error) { - if s == "" { - return nil, nil - } - const op = "server.tagsFromAggregatedTagString" - const aggregateDelimiter = "Z" - const pairDelimiter = "Y" - var tags []*Tag - for _, kv := range strings.Split(s, aggregateDelimiter) { - res := strings.SplitN(kv, pairDelimiter, 3) - if len(res) != 2 { - return nil, errors.New(ctx, errors.Internal, op, "invalid aggregated tag pairs") - } - tags = append(tags, &Tag{ - Key: res[0], - Value: res[1], - }) - } - return tags, nil -} - func (a *workerAggregate) GetPublicId() string { return a.PublicId } diff --git a/internal/server/worker_tag.go b/internal/server/worker_tag.go index f17e6bc671..0a1c0e21cc 100644 --- a/internal/server/worker_tag.go +++ b/internal/server/worker_tag.go @@ -3,6 +3,11 @@ package server +import ( + "encoding/json" + "fmt" +) + // A Tag is a custom key/value pair which can be attached to a Worker. // Multiple Tags may contain the same key and different values in which // case both key/value pairs are valid. Tags can be sourced from either the @@ -13,17 +18,24 @@ type Tag struct { Value string } -type TagSource string - -const ( - ConfigurationTagSource TagSource = "configuration" - ApiTagSource TagSource = "api" -) +// Tags allows us to scan a JSON array of worker tags from the database +type Tags []*Tag -func (t TagSource) isValid() bool { - return t == ConfigurationTagSource || t == ApiTagSource +// Scan scans value into Tags, and implements the sql.Scanner interface +func (t *Tags) Scan(in any) error { + var err error + switch v := in.(type) { + case string: + err = json.Unmarshal([]byte(v), &t) + case []byte: + err = json.Unmarshal(v, &t) + default: + return fmt.Errorf("cannot scan type %T into tags", in) + } + return err } -func (t TagSource) String() string { - return string(t) +// GormDataType gorm common data type (required) +func (t *Tags) GormDataType() string { + return "tags" } diff --git a/internal/server/worker_tags_test.go b/internal/server/worker_tags_test.go index 181580915a..bfb4f8ff54 100644 --- a/internal/server/worker_tags_test.go +++ b/internal/server/worker_tags_test.go @@ -25,49 +25,20 @@ func TestWorkerTags_Create(t *testing.T) { tests := []struct { name string - want *store.WorkerTag + want *store.ApiTag wantCreateErr bool }{ { - name: "success api source", - want: &store.WorkerTag{ + name: "success- api tag", + want: &store.ApiTag{ WorkerId: worker.GetPublicId(), Key: "key", Value: "value", - Source: ApiTagSource.String(), }, }, - { - name: "success config source", - want: &store.WorkerTag{ - WorkerId: worker.GetPublicId(), - Key: "key", - Value: "value", - Source: ConfigurationTagSource.String(), - }, - }, - { - name: "unknown source", - want: &store.WorkerTag{ - WorkerId: worker.GetPublicId(), - Key: "key", - Value: "value", - Source: "unknown", - }, - wantCreateErr: true, - }, - { - name: "no source", - want: &store.WorkerTag{ - WorkerId: worker.GetPublicId(), - Key: "key", - Value: "value", - }, - wantCreateErr: true, - }, { name: "bad worker id", - want: &store.WorkerTag{ + want: &store.ApiTag{ WorkerId: "w_badworkeridthatdoesntexist", Key: "key", Value: "value", @@ -76,7 +47,7 @@ func TestWorkerTags_Create(t *testing.T) { }, { name: "missing worker id", - want: &store.WorkerTag{ + want: &store.ApiTag{ Key: "key", Value: "value", }, @@ -84,7 +55,7 @@ func TestWorkerTags_Create(t *testing.T) { }, { name: "missing key", - want: &store.WorkerTag{ + want: &store.ApiTag{ WorkerId: worker.GetPublicId(), Value: "value", }, 
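The Tags.Scan implementation added in worker_tag.go above accepts both string and []byte because drivers may hand back the view's json_agg output either way. A quick self-contained illustration of the round-trip, assuming the JSON shape the updated view builds with json_build_object('key', ..., 'value', ...):

package main

import (
	"encoding/json"
	"fmt"
)

type Tag struct {
	Key   string
	Value string
}

// Tags mirrors the sql.Scanner added in worker_tag.go.
type Tags []*Tag

func (t *Tags) Scan(in any) error {
	switch v := in.(type) {
	case string:
		return json.Unmarshal([]byte(v), t)
	case []byte:
		return json.Unmarshal(v, t)
	default:
		return fmt.Errorf("cannot scan type %T into tags", in)
	}
}

func main() {
	// Shape produced by json_agg(json_build_object('key', key, 'value', value)).
	raw := `[{"key":"key1","value":"value1"},{"key":"key2","value":"value2"}]`
	var tags Tags
	if err := tags.Scan(raw); err != nil {
		panic(err)
	}
	fmt.Println(len(tags), tags[0].Key, tags[1].Value) // 2 key1 value2
}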
@@ -92,7 +63,7 @@ func TestWorkerTags_Create(t *testing.T) { }, { name: "missing value", - want: &store.WorkerTag{ + want: &store.ApiTag{ WorkerId: worker.GetPublicId(), Key: "key", }, @@ -111,6 +82,15 @@ func TestWorkerTags_Create(t *testing.T) { } }) } + + // Create a config tag + configTag := &store.ConfigTag{ + WorkerId: worker.GetPublicId(), + Key: "key", + Value: "value", + } + err := rw.Create(context.Background(), configTag) + assert.NoError(t, err) } func TestRepository_AddWorkerTags(t *testing.T) { @@ -691,7 +671,7 @@ func TestRepository_WorkerTagsConsequent(t *testing.T) { worker, err = repo.LookupWorker(context.Background(), worker.PublicId) require.NoError(err) assert.Equal(uint32(5), worker.Version) - assert.Equal(set, worker.apiTags) + assert.Equal(Tags(nil), worker.apiTags) assert.Equal(0, len(worker.apiTags)) // Ensure config tags are untouched diff --git a/internal/server/worker_test.go b/internal/server/worker_test.go index f24ea2d5a1..7bc175ebce 100644 --- a/internal/server/worker_test.go +++ b/internal/server/worker_test.go @@ -109,11 +109,10 @@ func TestWorkerAggregate(t *testing.T) { w.PublicId = id require.NoError(t, rw.Create(ctx, w)) require.NoError(t, rw.Create(ctx, - &store.WorkerTag{ + &store.ConfigTag{ WorkerId: id, Key: "key", Value: "val", - Source: ConfigurationTagSource.String(), })) got := getAggWorker(id) @@ -122,7 +121,7 @@ func TestWorkerAggregate(t *testing.T) { assert.NotNil(t, got.GetLastStatusTime()) assert.NotNil(t, got.GetReleaseVersion()) assert.Empty(t, got.apiTags) - assert.Equal(t, got.configTags, []*Tag{{Key: "key", Value: "val"}}) + assert.Equal(t, got.configTags, Tags{{Key: "key", Value: "val"}}) }) t.Run("Worker with many config tag", func(t *testing.T) { @@ -134,23 +133,20 @@ func TestWorkerAggregate(t *testing.T) { w.Type = KmsWorkerType.String() w.PublicId = id require.NoError(t, rw.Create(ctx, w)) - require.NoError(t, rw.Create(ctx, &store.WorkerTag{ + require.NoError(t, rw.Create(ctx, &store.ConfigTag{ WorkerId: id, Key: "key", Value: "val", - Source: ConfigurationTagSource.String(), })) - require.NoError(t, rw.Create(ctx, &store.WorkerTag{ + require.NoError(t, rw.Create(ctx, &store.ConfigTag{ WorkerId: id, Key: "key", Value: "val2", - Source: ConfigurationTagSource.String(), })) - require.NoError(t, rw.Create(ctx, &store.WorkerTag{ + require.NoError(t, rw.Create(ctx, &store.ConfigTag{ WorkerId: id, Key: "key2", Value: "val2", - Source: ConfigurationTagSource.String(), })) got := getAggWorker(id) @@ -173,11 +169,10 @@ func TestWorkerAggregate(t *testing.T) { w.PublicId = id require.NoError(t, rw.Create(ctx, w)) require.NoError(t, rw.Create(ctx, - &store.WorkerTag{ + &store.ApiTag{ WorkerId: id, Key: "key", Value: "val", - Source: ApiTagSource.String(), })) got := getAggWorker(id) @@ -185,7 +180,7 @@ func TestWorkerAggregate(t *testing.T) { assert.Equal(t, uint32(1), got.GetVersion()) assert.NotNil(t, got.GetLastStatusTime()) assert.Empty(t, got.GetConfigTags()) - assert.Equal(t, got.apiTags, []*Tag{{Key: "key", Value: "val"}}) + assert.Equal(t, got.apiTags, Tags{{Key: "key", Value: "val"}}) }) // Worker with mix of tag sources @@ -198,23 +193,20 @@ func TestWorkerAggregate(t *testing.T) { w.Type = KmsWorkerType.String() w.PublicId = id require.NoError(t, rw.Create(ctx, w)) - require.NoError(t, rw.Create(ctx, &store.WorkerTag{ + require.NoError(t, rw.Create(ctx, &store.ConfigTag{ WorkerId: id, Key: "key", Value: "val", - Source: ConfigurationTagSource.String(), })) - require.NoError(t, rw.Create(ctx, &store.WorkerTag{ + 
require.NoError(t, rw.Create(ctx, &store.ApiTag{ WorkerId: id, Key: "key", Value: "val2", - Source: ApiTagSource.String(), })) - require.NoError(t, rw.Create(ctx, &store.WorkerTag{ + require.NoError(t, rw.Create(ctx, &store.ApiTag{ WorkerId: id, Key: "key2", Value: "val2", - Source: ApiTagSource.String(), })) got := getAggWorker(id)
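For the mixed-source worker seeded just above (one ConfigTag, two ApiTags), the aggregate should now surface the tags on separate fields; assertions along these lines would confirm the split (a sketch in the style of the earlier subtests — the actual assertions continue past this excerpt):

// Expected split for the mixed-source worker (sketch, not the verbatim test).
assert.Equal(t, Tags{{Key: "key", Value: "val"}}, got.configTags)
assert.ElementsMatch(t, Tags{
	{Key: "key", Value: "val2"},
	{Key: "key2", Value: "val2"},
}, got.apiTags)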